HADOOP-8288. Remove references of mapred.child.ulimit etc. since they are not being used any more (Ravi Prakash via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1328018 13f79535-47bb-0310-9956-ffa450edef68
Robert Joseph Evans 2012-04-19 16:05:34 +00:00
parent 97ae5b675f
commit f6cadd8610
6 changed files with 3 additions and 250 deletions


@@ -423,6 +423,9 @@ Release 0.23.3 - UNRELEASED
HADOOP-8108. Move method getHostPortString() from NameNode to NetUtils.
(Brandon Li via jitendra)
HADOOP-8288. Remove references of mapred.child.ulimit etc. since they are
not being used any more (Ravi Prakash via bobby)
OPTIMIZATIONS
BUG FIXES


@@ -81,64 +81,6 @@ abstract public class Shell {
/** If or not script timed out*/
private AtomicBoolean timedOut;
/** a Unix command to get ulimit of a process. */
public static final String ULIMIT_COMMAND = "ulimit";
/**
* Get the Unix command for setting the maximum virtual memory available
* to a given child process. This is only relevant when we are forking a
* process from within the Mapper or the Reducer implementations.
* Also see Hadoop Pipes and Hadoop Streaming.
*
* It also checks to ensure that we are running on a *nix platform else
* (e.g. in Cygwin/Windows) it returns <code>null</code>.
* @param memoryLimit virtual memory limit
* @return a <code>String[]</code> with the ulimit command arguments or
* <code>null</code> if we are running on a non *nix platform or
* if the limit is unspecified.
*/
public static String[] getUlimitMemoryCommand(int memoryLimit) {
// ulimit isn't supported on Windows
if (WINDOWS) {
return null;
}
return new String[] {ULIMIT_COMMAND, "-v", String.valueOf(memoryLimit)};
}
/**
* Get the Unix command for setting the maximum virtual memory available
* to a given child process. This is only relevant when we are forking a
* process from within the Mapper or the Reducer implementations.
* see also Hadoop Pipes and Streaming.
*
* It also checks to ensure that we are running on a *nix platform else
* (e.g. in Cygwin/Windows) it returns <code>null</code>.
* @param conf configuration
* @return a <code>String[]</code> with the ulimit command arguments or
* <code>null</code> if we are running on a non *nix platform or
* if the limit is unspecified.
* @deprecated Use {@link #getUlimitMemoryCommand(int)}
*/
@Deprecated
public static String[] getUlimitMemoryCommand(Configuration conf) {
// ulimit isn't supported on Windows
if (WINDOWS) {
return null;
}
// get the memory limit from the configuration
String ulimit = conf.get("mapred.child.ulimit");
if (ulimit == null) {
return null;
}
// Parse it to ensure it is legal/sane
int memoryLimit = Integer.valueOf(ulimit);
return getUlimitMemoryCommand(memoryLimit);
}
/** Set to true on Windows platforms */
public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
= System.getProperty("os.name").startsWith("Windows");
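
Note: the deleted helper only built the ulimit argument vector; the caller had to splice it into a shell invocation so the limit bound the forked child rather than the parent JVM. Below is a minimal sketch of that usage pattern, assuming bash; the wrapper class and the trailing "ulimit -v" probe are illustrative, not the removed caller code.

```java
// Sketch only (not the removed TaskTracker/TaskRunner code): shows how the
// String[] built by getUlimitMemoryCommand(int) was typically spliced into a
// shell wrapper so the limit bound the forked child, not the parent JVM.
// The "bash -c" wrapper and the trailing "ulimit -v" probe are assumptions.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class UlimitWrapperSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    int memoryLimitKB = 786432; // ulimit -v takes kilobytes; this is 768 MB
    String[] ulimitCmd = {"ulimit", "-v", String.valueOf(memoryLimitKB)};

    // The limit only applies inside the shell that exec's the child, so the
    // ulimit call and the child command must share one "bash -c" string.
    String wrapped = String.join(" ", ulimitCmd) + "; ulimit -v";
    Process p = new ProcessBuilder("bash", "-c", wrapped).start();
    try (BufferedReader out = new BufferedReader(
        new InputStreamReader(p.getInputStream()))) {
      System.out.println(out.readLine()); // expected: 786432
    }
    p.waitFor();
  }
}
```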


@@ -108,11 +108,6 @@
<value>-server -Xmx640m -Djava.net.preferIPv4Stack=true</value>
</property>
<property>
<name>mapred.child.ulimit</name>
<value>8388608</value>
</property>
<property>
<name>mapred.job.tracker.persist.jobstatus.active</name>
<value>true</value>
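
Note: the removed value is in kilobytes (the unit ulimit -v takes); 8388608 KB amounts to an 8 GiB virtual-memory cap per child process.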


@@ -314,8 +314,6 @@ Deprecated Properties
*---+---+
|mapred.map.child.log.level | mapreduce.map.log.level
*---+---+
|mapred.map.child.ulimit | mapreduce.map.ulimit
*---+---+
|mapred.map.max.attempts | mapreduce.map.maxattempts
*---+---+
|mapred.map.output.compression.codec | mapreduce.map.output.compress.codec
@@ -378,8 +376,6 @@ Deprecated Properties
*---+---+
|mapred.reduce.child.log.level | mapreduce.reduce.log.level
*---+---+
|mapred.reduce.child.ulimit | mapreduce.reduce.ulimit
*---+---+
|mapred.reduce.max.attempts | mapreduce.reduce.maxattempts
*---+---+
|mapred.reduce.parallel.copies | mapreduce.reduce.shuffle.parallelcopies
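
The two ulimit rows are dropped because their new-style targets (mapreduce.map.ulimit and mapreduce.reduce.ulimit) no longer exist after this change. For the rows that remain, the old-to-new mapping is applied when a job configuration is read back. A hedged sketch of that behavior via Configuration.addDeprecation, which mirrors the registration Hadoop performs internally (repeated here only to keep the example self-contained):

```java
// Sketch, not Hadoop's own registration code: demonstrates how a deprecated
// key from the table above resolves to its replacement when read back.
// Hadoop registers these mappings internally; the explicit addDeprecation
// call here just makes the example self-contained.
import org.apache.hadoop.conf.Configuration;

public class DeprecatedKeySketch {
  public static void main(String[] args) {
    Configuration.addDeprecation("mapred.map.child.log.level",
        new String[] {"mapreduce.map.log.level"});

    Configuration conf = new Configuration(false);
    conf.set("mapred.map.child.log.level", "DEBUG"); // old name, still honored
    // Value is visible under the new name (a deprecation warning is logged).
    System.out.println(conf.get("mapreduce.map.log.level")); // prints DEBUG
  }
}
```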


@@ -1,138 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.io.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.util.StringUtils;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* This tests the setting of memory limit for streaming processes.
* This will launch a streaming app which will allocate 10MB memory.
* First, program is launched with sufficient memory. And test expects
* it to succeed. Then program is launched with insufficient memory and
* is expected to be a failure.
*/
public class TestUlimit {
String input = "the dummy input";
Path inputPath = new Path("target/testing/in");
Path outputPath = new Path("target/testing/out");
String map = null;
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
FileSystem fs = null;
private static String SET_MEMORY_LIMIT = "786432"; // 768MB
String[] genArgs(String memLimit) {
String strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
return new String[] {
"-input", inputPath.toString(),
"-output", outputPath.toString(),
"-mapper", map,
"-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
"-numReduceTasks", "0",
"-jobconf", MRJobConfig.NUM_MAPS + "=1",
"-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
"-jobconf", strNamenode,
"-jobconf", "stream.tmpdir=" +
System.getProperty("test.build.data","/tmp"),
"-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
"-jobconf", "mapreduce.framework.name=yarn"
};
}
/**
* This tests the setting of memory limit for streaming processes.
* This will launch a streaming app which will allocate 10MB memory.
* First, program is launched with sufficient memory. And test expects
* it to succeed. Then program is launched with insufficient memory and
* is expected to be a failure.
*/
@Test
@Ignore
public void testCommandLine() {
if (UtilTest.isCygwin()) {
return;
}
try {
final int numSlaves = 2;
Configuration conf = new Configuration();
dfs = new MiniDFSCluster(conf, numSlaves, true, null);
fs = dfs.getFileSystem();
mr = new MiniMRCluster(numSlaves, fs.getUri().toString(), 1);
writeInputFile(fs, inputPath);
map = UtilTest.makeJavaCommand(UlimitApp.class, new String[]{});
runProgram(SET_MEMORY_LIMIT);
fs.delete(outputPath, true);
assertFalse("output not cleaned up", fs.exists(outputPath));
mr.waitUntilIdle();
} catch(IOException e) {
fail(StringUtils.stringifyException(e));
} finally {
if (mr != null) {
mr.shutdown();
}
if (dfs != null) {
dfs.shutdown();
}
}
}
private void writeInputFile(FileSystem fs, Path dir) throws IOException {
DataOutputStream out = fs.create(new Path(dir, "part0"));
out.writeBytes(input);
out.close();
}
/**
* Runs the streaming program. and asserts the result of the program.
* @param memLimit memory limit to set for mapred child.
* @param result Expected result
* @throws IOException
*/
private void runProgram(String memLimit) throws IOException {
boolean mayExit = false;
StreamJob job = new StreamJob(genArgs(memLimit), mayExit);
job.go();
String output = MapReduceTestUtil.readOutput(outputPath,
mr.createJobConf());
assertEquals("output is wrong", SET_MEMORY_LIMIT,
output.trim());
}
public static void main(String[]args) throws Exception
{
new TestUlimit().testCommandLine();
}
}


@@ -1,45 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.io.*;
/**
* The UlimitApp discards the input
* and exec's ulimit -v to know the ulimit value.
* And writes the output to the standard out.
* @see {@link TestUlimit}
*/
public class UlimitApp {
public static void main(String args[]) throws IOException{
BufferedReader in = new BufferedReader(
new InputStreamReader(System.in));
String line = null;
while ((line = in.readLine()) != null) {}
Process process = Runtime.getRuntime().exec(new String[]{
"bash", "-c", "ulimit -v"});
InputStream is = process.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
while ((line = br.readLine()) != null) {
System.out.println(line);
}
}
}
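
The deleted mapper's contract: consume all of stdin, then emit exactly one line containing the process's own "ulimit -v" value, which TestUlimit compared against the configured limit. A standalone sketch of that contract, with the Java mapper stood in by an equivalent bash one-liner (the bash dependency and the sample input are assumptions):

```java
// Standalone sketch of UlimitApp's contract: consume all of stdin, then emit
// one line with the process's own "ulimit -v" value. The bash one-liner
// stands in for the Java mapper; bash and the sample input are assumptions.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;

public class UlimitAppProbe {
  public static void main(String[] args) throws IOException, InterruptedException {
    Process p = new ProcessBuilder("bash", "-c",
        "cat > /dev/null; ulimit -v").start(); // drain stdin, then report limit
    try (Writer stdin = new OutputStreamWriter(p.getOutputStream())) {
      stdin.write("the dummy input\n"); // discarded, as UlimitApp discards it
    }
    try (BufferedReader stdout = new BufferedReader(
        new InputStreamReader(p.getInputStream()))) {
      // Prints the child's current limit ("unlimited" unless one was set).
      System.out.println(stdout.readLine());
    }
    p.waitFor();
  }
}
```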