MAPREDUCE-6057. Remove obsolete entries from mapred-default.xml (Ray Chiang via aw)

commit 618ba707f0 (parent 1a2459bd4b)
Author: Allen Wittenauer
Date:   2015-04-26 20:31:40 -07:00
6 changed files with 9 additions and 43 deletions


@@ -105,6 +105,9 @@ Trunk (Unreleased)
     MAPREDUCE-6260. Convert site documentation to markdown (Masatake Iwasaki
     via aw)
 
+    MAPREDUCE-6057. Remove obsolete entries from mapred-default.xml
+    (Ray Chiang via aw)
+
   BUG FIXES
 
     MAPREDUCE-6191. Improve clearing stale state of Java serialization


@@ -978,8 +978,9 @@ public class MapTask extends Task {
         throw new IOException(
             "Invalid \"" + JobContext.IO_SORT_MB + "\": " + sortmb);
       }
-      sorter = ReflectionUtils.newInstance(job.getClass("map.sort.class",
-            QuickSort.class, IndexedSorter.class), job);
+      sorter = ReflectionUtils.newInstance(job.getClass(
+            MRJobConfig.MAP_SORT_CLASS, QuickSort.class,
+            IndexedSorter.class), job);
       // buffers and accounting
       int maxMemUsage = sortmb << 20;
       maxMemUsage -= maxMemUsage % METASIZE;
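For context, the change above only swaps the bare "map.sort.class" literal for the new MRJobConfig constant; the lookup itself is unchanged. A minimal sketch of that lookup, assuming the constant added by this patch (the class name SortClassDemo and the choice of HeapSort are illustrative, not part of the commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.util.HeapSort;
import org.apache.hadoop.util.IndexedSorter;
import org.apache.hadoop.util.QuickSort;
import org.apache.hadoop.util.ReflectionUtils;

public class SortClassDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Override the default map-side sorter; HeapSort also implements
    // IndexedSorter, so it is a valid value for this key.
    conf.setClass(MRJobConfig.MAP_SORT_CLASS, HeapSort.class, IndexedSorter.class);

    // Mirrors the resolution MapTask performs: property value, falling back
    // to QuickSort, constrained to IndexedSorter implementations.
    IndexedSorter sorter = ReflectionUtils.newInstance(
        conf.getClass(MRJobConfig.MAP_SORT_CLASS, QuickSort.class,
            IndexedSorter.class), conf);
    System.out.println("Map-side sorter: " + sorter.getClass().getName());
  }
}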


@@ -28,6 +28,9 @@ import org.apache.hadoop.yarn.util.Apps;
 @InterfaceStability.Evolving
 public interface MRJobConfig {
+
+  // Used by MapTask
+  public static final String MAP_SORT_CLASS = "map.sort.class";
 
   // Put all of the attribute names in here so that Job and JobContext are
   // consistent.
   public static final String INPUT_FORMAT_CLASS_ATTR = "mapreduce.job.inputformat.class";


@@ -57,13 +57,6 @@
   set to less than .5</description>
 </property>
 
-<property>
-  <name>mapreduce.local.clientfactory.class.name</name>
-  <value>org.apache.hadoop.mapred.LocalClientFactory</value>
-  <description>This the client factory that is responsible for
-  creating local job runner client</description>
-</property>
-
 <property>
   <name>mapreduce.job.maps</name>
   <value>2</value>
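The removed client-factory key reflects that local execution is no longer selected through a factory class; in current MapReduce the framework name does this. A small sketch, assuming mapreduce.framework.name=local is the intended replacement (the job name "local-demo" is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class LocalRunnerDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumption: the local job runner is picked via the framework name
    // rather than the removed mapreduce.local.clientfactory.class.name key.
    conf.set("mapreduce.framework.name", "local");
    conf.set("fs.defaultFS", "file:///");
    Job job = Job.getInstance(conf, "local-demo");
    System.out.println("Framework: "
        + job.getConfiguration().get("mapreduce.framework.name"));
  }
}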
@@ -883,30 +876,6 @@
   </description>
 </property>
 
-<property>
-  <name>mapreduce.map.skip.proc.count.autoincr</name>
-  <value>true</value>
-  <description> The flag which if set to true,
-  SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS is incremented
-  by MapRunner after invoking the map function. This value must be set to
-  false for applications which process the records asynchronously
-  or buffer the input records. For example streaming.
-  In such cases applications should increment this counter on their own.
-  </description>
-</property>
-
-<property>
-  <name>mapreduce.reduce.skip.proc.count.autoincr</name>
-  <value>true</value>
-  <description> The flag which if set to true,
-  SkipBadRecords.COUNTER_REDUCE_PROCESSED_GROUPS is incremented
-  by framework after invoking the reduce function. This value must be set to
-  false for applications which process the records asynchronously
-  or buffer the input records. For example streaming.
-  In such cases applications should increment this counter on their own.
-  </description>
-</property>
-
 <property>
   <name>mapreduce.job.skip.outdir</name>
   <value></value>
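The two deleted skip-record flags described a contract that still matters: a task that buffers records or processes them asynchronously (streaming, for example) must maintain the processed-records counter itself. A sketch against the old org.apache.hadoop.mapred API (the class name BufferingMapper and the configureSkipping helper are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SkipBadRecords;

// Sketch: a mapper that hands records off asynchronously and therefore
// maintains the "processed records" counter itself instead of relying on
// the framework's auto-increment.
public class BufferingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {

  public static void configureSkipping(Configuration conf) {
    // Disable the automatic increment (the programmatic equivalent of the
    // old mapreduce.map.skip.proc.count.autoincr key).
    SkipBadRecords.setAutoIncrMapperProcCount(conf, false);
  }

  @Override
  public void map(LongWritable key, Text value,
      OutputCollector<Text, LongWritable> out, Reporter reporter)
      throws IOException {
    // ... buffer or hand the record off here ...
    // Once the record has really been processed, bump the skip counter.
    reporter.incrCounter(SkipBadRecords.COUNTER_GROUP,
        SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS, 1);
    out.collect(value, key);
  }
}

The reduce side is symmetric: SkipBadRecords.setAutoIncrReducerProcCount plus SkipBadRecords.COUNTER_REDUCE_PROCESSED_GROUPS.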


@@ -93,7 +93,6 @@
 <property><!--Loaded from job.xml--><name>mapreduce.reduce.input.buffer.percent</name><value>0.0</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.map.output.compress.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
 <property><!--Loaded from job.xml--><name>yarn.resourcemanager.delegation-token.keepalive-time-ms</name><value>300000</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.map.skip.proc.count.autoincr</name><value>true</value></property>
 <property><!--Loaded from job.xml--><name>dfs.datanode.directoryscan.threads</name><value>1</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.jobtracker.address</name><value>local</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.cluster.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
@@ -197,7 +196,6 @@
 <property><!--Loaded from job.xml--><name>dfs.block.access.key.update.interval</name><value>600</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.jobhistory.move.interval-ms</name><value>30000</value></property>
 <property><!--Loaded from job.xml--><name>dfs.datanode.dns.interface</name><value>default</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.reduce.skip.proc.count.autoincr</name><value>true</value></property>
 <property><!--Loaded from job.xml--><name>dfs.namenode.backup.http-address</name><value>0.0.0.0:50105</value></property>
 <property><!--Loaded from job.xml--><name>yarn.nodemanager.container-monitor.interval-ms</name><value>3000</value></property>
 <property><!--Loaded from job.xml--><name>mapred.reducer.new-api</name><value>true</value></property>
@@ -290,7 +288,6 @@
 <property><!--Loaded from job.xml--><name>yarn.app.mapreduce.am.command-opts</name><value>-Xmx500m</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.admin.user.env</name><value>LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native</value></property>
 <property><!--Loaded from job.xml--><name>dfs.namenode.checkpoint.edits.dir</name><value>${dfs.namenode.checkpoint.dir}</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.local.clientfactory.class.name</name><value>org.apache.hadoop.mapred.LocalClientFactory</value></property>
 <property><!--Loaded from job.xml--><name>hadoop.common.configuration.version</name><value>0.23.0</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.tasktracker.dns.interface</name><value>default</value></property>
 <property><!--Loaded from job.xml--><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerialization</value></property>
@@ -380,7 +377,6 @@
 <property><!--Loaded from job.xml--><name>fs.AbstractFileSystem.viewfs.impl</name><value>org.apache.hadoop.fs.viewfs.ViewFs</value></property>
 <property><!--Loaded from job.xml--><name>yarn.resourcemanager.resource-tracker.client.thread-count</name><value>50</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.tasktracker.dns.nameserver</name><value>default</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.clientfactory.class.name</name><value>org.apache.hadoop.mapred.YarnClientFactory</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.map.output.compress</name><value>false</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.job.counters.limit</name><value>120</value></property>
 <property><!--Loaded from job.xml--><name>dfs.datanode.ipc.address</name><value>0.0.0.0:50020</value></property>


@@ -4645,7 +4645,6 @@
 "dfs.ha.log-roll.period" : "120",
 "mapreduce.reduce.input.buffer.percent" : "0.0",
 "mapreduce.map.output.compress.codec" : "org.apache.hadoop.io.compress.SnappyCodec",
-"mapreduce.map.skip.proc.count.autoincr" : "true",
 "dfs.client.failover.sleep.base.millis" : "500",
 "dfs.datanode.directoryscan.threads" : "1",
 "mapreduce.jobtracker.address" : "neededForHive:999999",
@@ -4765,7 +4764,6 @@
 "dfs.namenode.backup.address" : "0.0.0.0:50100",
 "hadoop.util.hash.type" : "murmur",
 "dfs.block.access.key.update.interval" : "600",
-"mapreduce.reduce.skip.proc.count.autoincr" : "true",
 "dfs.datanode.dns.interface" : "default",
 "dfs.datanode.use.datanode.hostname" : "false",
 "mapreduce.job.output.key.class" : "org.apache.hadoop.io.Text",
@@ -4875,7 +4873,6 @@
 "mapreduce.reduce.shuffle.read.timeout" : "180000",
 "mapreduce.admin.user.env" : "LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native",
 "yarn.app.mapreduce.am.command-opts" : " -Xmx1238932873",
-"mapreduce.local.clientfactory.class.name" : "org.apache.hadoop.mapred.LocalClientFactory",
 "dfs.namenode.checkpoint.edits.dir" : "${dfs.namenode.checkpoint.dir}",
 "fs.permissions.umask-mode" : "022",
 "dfs.client.domain.socket.data.traffic" : "false",
@@ -9754,7 +9751,6 @@
 "dfs.ha.log-roll.period" : "120",
 "mapreduce.reduce.input.buffer.percent" : "0.0",
 "mapreduce.map.output.compress.codec" : "org.apache.hadoop.io.compress.SnappyCodec",
-"mapreduce.map.skip.proc.count.autoincr" : "true",
 "dfs.client.failover.sleep.base.millis" : "500",
 "dfs.datanode.directoryscan.threads" : "1",
 "mapreduce.jobtracker.address" : "neededForHive:999999",
@@ -9874,7 +9870,6 @@
 "dfs.namenode.backup.address" : "0.0.0.0:50100",
 "hadoop.util.hash.type" : "murmur",
 "dfs.block.access.key.update.interval" : "600",
-"mapreduce.reduce.skip.proc.count.autoincr" : "true",
 "dfs.datanode.dns.interface" : "default",
 "dfs.datanode.use.datanode.hostname" : "false",
 "mapreduce.job.output.key.class" : "org.apache.hadoop.io.Text",
@@ -9984,7 +9979,6 @@
 "mapreduce.reduce.shuffle.read.timeout" : "180000",
 "mapreduce.admin.user.env" : "LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native",
 "yarn.app.mapreduce.am.command-opts" : " -Xmx1238932873",
-"mapreduce.local.clientfactory.class.name" : "org.apache.hadoop.mapred.LocalClientFactory",
 "dfs.namenode.checkpoint.edits.dir" : "${dfs.namenode.checkpoint.dir}",
 "fs.permissions.umask-mode" : "022",
 "dfs.client.domain.socket.data.traffic" : "false",