MAPREDUCE-4375. Show Configuration Tracability in MR UI (bobby via tgraves)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1367539 13f79535-47bb-0310-9956-ffa450edef68
parent 556be2af92
commit a3e695ab2c
@@ -505,6 +505,9 @@ Release 0.23.3 - UNRELEASED
 
     MAPREDUCE-4267. mavenize pipes (tgraves via bobby)
 
+    MAPREDUCE-4375. Show Configuration Tracability in MR UI (bobby
+    via tgraves)
+
   OPTIMIZATIONS
 
     MAPREDUCE-3850. Avoid redundant calls for tokens in TokenCache (Daryn
@@ -1561,7 +1561,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
     Path confPath = getConfFile();
     FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);
     Configuration jobConf = new Configuration(false);
-    jobConf.addResource(fc.open(confPath));
+    jobConf.addResource(fc.open(confPath), confPath.toString());
     return jobConf;
   }
 }
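For context on the one-line change above: Configuration has an addResource overload that takes a name alongside the stream, and getPropertySources(key) later reports that name as the origin of every property loaded from it. A minimal standalone sketch, assuming an illustrative key, value, and "job.xml" label (none of which come from the commit):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;

    public class SourceTagDemo {
      public static void main(String[] args) {
        String xml = "<configuration><property>"
            + "<name>demo.key</name><value>demo.value</value>"
            + "</property></configuration>";
        InputStream in = new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8));

        Configuration conf = new Configuration(false);   // skip default resources
        conf.addResource(in, "job.xml");                 // tag the stream with a name

        System.out.println(conf.get("demo.key"));        // demo.value
        // With the named overload, the tag comes back as the property's source:
        for (String source : conf.getPropertySources("demo.key")) {
          System.out.println(source);                    // job.xml
        }
      }
    }

Without the second argument the stream has no usable name, which is why JobImpl.loadConfFile() now passes confPath.toString().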
@@ -78,14 +78,29 @@ public class ConfBlock extends HtmlBlock {
         tr().
           th(_TH, "key").
           th(_TH, "value").
+          th(_TH, "source chain").
         _().
       _().
     tbody();
     for (ConfEntryInfo entry : info.getProperties()) {
+      StringBuffer buffer = new StringBuffer();
+      String[] sources = entry.getSource();
+      //Skip the last entry, because it is always the same HDFS file, and
+      // output them in reverse order so most recent is output first
+      boolean first = true;
+      for(int i = (sources.length - 2); i >= 0; i--) {
+        if(!first) {
+          // \u2B05 is an arrow <--
+          buffer.append(" \u2B05 ");
+        }
+        first = false;
+        buffer.append(sources[i]);
+      }
       tbody.
         tr().
           td(entry.getName()).
           td(entry.getValue()).
+          td(buffer.toString()).
         _();
     }
     tbody._().
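The loop above is what builds the new "source chain" cell. A minimal sketch of the same formatting logic outside the web framework, assuming getPropertySources() lists sources earliest-first (the sample resource names are illustrative): it drops the final element, which for a finished job is always the job.xml copy in HDFS, and walks backwards so the most recent source prints first:

    public class SourceChainDemo {
      static String chain(String[] sources) {
        StringBuilder buffer = new StringBuilder();
        // Skip the last entry and iterate in reverse, as ConfBlock does.
        for (int i = sources.length - 2; i >= 0; i--) {
          if (buffer.length() > 0) {
            buffer.append(" \u2B05 ");  // \u2B05 is a leftwards arrow <--
          }
          buffer.append(sources[i]);
        }
        return buffer.toString();
      }

      public static void main(String[] args) {
        String[] sources = {"mapred-default.xml", "mapred-site.xml", "job.xml"};
        // Prints: mapred-site.xml ⬅ mapred-default.xml
        System.out.println(chain(sources));
      }
    }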
@@ -93,6 +108,7 @@ public class ConfBlock extends HtmlBlock {
         tr().
           th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
           th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
+          th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
         _().
       _().
     _();
@@ -27,13 +27,19 @@ public class ConfEntryInfo {
 
   protected String name;
   protected String value;
+  protected String[] source;
 
   public ConfEntryInfo() {
   }
 
   public ConfEntryInfo(String key, String value) {
+    this(key, value, null);
+  }
+
+  public ConfEntryInfo(String key, String value, String[] source) {
     this.name = key;
     this.value = value;
+    this.source = source;
   }
 
   public String getName() {
@@ -43,4 +49,8 @@ public class ConfEntryInfo {
   public String getValue() {
     return this.value;
   }
+
+  public String[] getSource() {
+    return source;
+  }
 }
@@ -46,7 +46,8 @@ public class ConfInfo {
     Configuration jobConf = job.loadConfFile();
     this.path = job.getConfFile().toString();
     for (Map.Entry<String, String> entry : jobConf) {
-      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue()));
+      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue(),
+          jobConf.getPropertySources(entry.getKey())));
     }
 
   }
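ConfInfo now attaches the full source history to each property. A hedged sketch of the ordering this produces, which the REST documentation below describes as "the latest source at the end of the list"; the resource names and key are made up for the example:

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;

    public class SourceOrderDemo {
      static ByteArrayInputStream resource(String value) {
        String xml = "<configuration><property><name>demo.key</name>"
            + "<value>" + value + "</value></property></configuration>";
        return new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8));
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.addResource(resource("from-site"), "mapred-site.xml");
        conf.addResource(resource("from-job"), "job.xml");   // added last, wins

        System.out.println(conf.get("demo.key"));            // from-job
        // Expected: mapred-site.xml, then job.xml (latest at the end)
        for (String source : conf.getPropertySources("demo.key")) {
          System.out.println(source);
        }
      }
    }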
@@ -603,7 +603,7 @@ public class MockJobs extends MockApps {
       public Configuration loadConfFile() throws IOException {
         FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
         Configuration jobConf = new Configuration(false);
-        jobConf.addResource(fc.open(configFile));
+        jobConf.addResource(fc.open(configFile), configFile.toString());
         return jobConf;
       }
     };
@@ -336,7 +336,7 @@ public class HistoryFileManager extends AbstractService {
     public synchronized Configuration loadConfFile() throws IOException {
       FileContext fc = FileContext.getFileContext(confFile.toUri(), conf);
       Configuration jobConf = new Configuration(false);
-      jobConf.addResource(fc.open(confFile));
+      jobConf.addResource(fc.open(confFile), confFile.toString());
       return jobConf;
     }
   }
@@ -1261,6 +1261,9 @@ History Server REST API's.
 *---------------+--------------+-------------------------------+
 | value | string | The value of the configuration property |
 *---------------+--------------+-------------------------------+
+| source | string | The location this configuration object came from. If there is more than one of these it shows the history with the latest source at the end of the list. |
+*---------------+--------------+-------------------------------+
+
 
 *** Response Examples
 
@@ -1293,14 +1296,17 @@ History Server REST API's.
       {
          "value" : "/home/hadoop/hdfs/data",
-         "name" : "dfs.datanode.data.dir"
+         "name" : "dfs.datanode.data.dir",
+         "source" : ["hdfs-site.xml", "job.xml"]
       },
       {
          "value" : "org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer",
-         "name" : "hadoop.http.filter.initializers"
+         "name" : "hadoop.http.filter.initializers",
+         "source" : ["programatically", "job.xml"]
       },
       {
          "value" : "/home/hadoop/tmp",
-         "name" : "mapreduce.cluster.temp.dir"
+         "name" : "mapreduce.cluster.temp.dir",
+         "source" : ["mapred-site.xml"]
       },
       ...
    ]
@@ -1335,14 +1341,19 @@ History Server REST API's.
 <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hdfs/data</value>
+   <source>hdfs-site.xml</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>hadoop.http.filter.initializers</name>
    <value>org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer</value>
+   <source>programatically</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>mapreduce.cluster.temp.dir</name>
    <value>/home/hadoop/tmp</value>
+   <source>mapred-site.xml</source>
 </property>
 ...
 </conf>
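Either representation above can be retrieved over HTTP. A hedged sketch of a client for the history server's per-job conf resource; the host, port, job id, and exact /ws/v1/history/mapreduce/jobs/{jobid}/conf path are placeholders assumed from this section, not taken from the diff:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class JobConfFetch {
      public static void main(String[] args) throws Exception {
        // Placeholder host, port, and job id for illustration only.
        URL url = new URL("http://historyserver.example.com:19888"
            + "/ws/v1/history/mapreduce/jobs/job_1326232085508_0004/conf");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json"); // or application/xml
        try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = reader.readLine()) != null) {
            // Prints the conf object: name/value/source per property.
            System.out.println(line);
          }
        }
      }
    }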
@@ -1296,6 +1296,8 @@ MapReduce Application Master REST API's.
 *---------------+--------------+-------------------------------+
 | value | string | The value of the configuration property |
 *---------------+--------------+-------------------------------+
+| source | string | The location this configuration object came from. If there is more than one of these it shows the history with the latest source at the end of the list. |
+*---------------+--------------+-------------------------------+
 
 ** Response Examples
 
@@ -1327,15 +1329,18 @@ MapReduce Application Master REST API's.
    "property" : [
       {
          "value" : "/home/hadoop/hdfs/data",
-         "name" : "dfs.datanode.data.dir"
+         "name" : "dfs.datanode.data.dir",
+         "source" : ["hdfs-site.xml", "job.xml"]
       },
       {
          "value" : "org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer",
-         "name" : "hadoop.http.filter.initializers"
+         "name" : "hadoop.http.filter.initializers",
+         "source" : ["programatically", "job.xml"]
       },
       {
          "value" : "/home/hadoop/tmp",
-         "name" : "mapreduce.cluster.temp.dir"
+         "name" : "mapreduce.cluster.temp.dir",
+         "source" : ["mapred-site.xml"]
       },
       ...
    ]
@@ -1370,14 +1375,19 @@ MapReduce Application Master REST API's.
 <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hdfs/data</value>
+   <source>hdfs-site.xml</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>hadoop.http.filter.initializers</name>
    <value>org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer</value>
+   <source>programatically</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>mapreduce.cluster.temp.dir</name>
    <value>/home/hadoop/tmp</value>
+   <source>mapred-site.xml</source>
 </property>
 ...
 </conf>