MAPREDUCE-5790. Made it easier to enable hprof profile options by default. Contributed by Gera Shegalov.

svn merge --ignore-ancestry -c 1610578 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1610579 13f79535-47bb-0310-9956-ffa450edef68
Author: Vinod Kumar Vavilapalli
Date:   2014-07-15 04:49:31 +00:00
Commit: 2c2c589700
Parent: bfefa9301f

4 changed files with 39 additions and 27 deletions

CHANGES.txt

@@ -92,6 +92,9 @@ Release 2.5.0 - UNRELEASED
     MAPREDUCE-5844. Add a configurable delay to reducer-preemption.
     (Maysam Yabandeh via kasha)
 
+    MAPREDUCE-5790. Made it easier to enable hprof profile options by default.
+    (Gera Shegalov via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES

mapred-default.xml

@@ -1045,7 +1045,7 @@
 <property>
   <name>mapreduce.task.profile.params</name>
-  <value></value>
+  <value>-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s</value>
   <description>JVM profiler parameters used to profile map and reduce task
     attempts. This string may contain a single format specifier %s that will
     be replaced by the path to profile.out in the task attempt log directory.
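
With this default in place, a job only needs to switch profiling on; the parameters no longer have to be spelled out. A minimal sketch of the resulting usage, assuming only the JobConf calls that already appear elsewhere in this diff (the class name and task range below are illustrative, not part of the commit):

import org.apache.hadoop.mapred.JobConf;

// Illustrative sketch only; not part of this commit.
public class ProfileDefaultsSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Turn task profiling on (mapreduce.task.profile=true).
    conf.setProfileEnabled(true);
    // Profile map task attempts 0-2 (an arbitrary example range).
    conf.setProfileTaskRange(true, "0-2");
    // No setProfileParams(...) call is needed any more: getProfileParams()
    // picks up the hprof default above. The %s placeholder is expanded to the
    // profile.out path in the task attempt log directory at task launch.
    System.out.println(conf.getProfileParams());
  }
}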

TestJobConf.java

@@ -29,11 +29,7 @@ public class TestJobConf {
   @Test
   public void testProfileParamsDefaults() {
     JobConf configuration = new JobConf();
-    Assert.assertNull(configuration.get(MRJobConfig.TASK_PROFILE_PARAMS));
     String result = configuration.getProfileParams();
     Assert.assertNotNull(result);
     Assert.assertTrue(result.contains("file=%s"));
     Assert.assertTrue(result.startsWith("-agentlib:hprof"));
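
The assertNull above is removed because mapreduce.task.profile.params now carries a default, so a fresh JobConf no longer returns null for that key. A hedged sketch of how the per-task-type overrides exercised by the MiniMRYarnCluster test below coexist with that default (the class name here is illustrative only):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;

// Illustrative sketch only; not part of this commit.
public class ProfileOverrideSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    conf.setProfileEnabled(true);
    // The generic key now resolves to the hprof default from mapred-default.xml.
    System.out.println("default: " + conf.getProfileParams());
    // The reduce-side profiler can still be overridden independently,
    // e.g. to -Xprof, as TestMRJobsWithProfiler does below.
    conf.set(MRJobConfig.TASK_REDUCE_PROFILE_PARAMS, "-Xprof");
    System.out.println("reduce:  " + conf.get(MRJobConfig.TASK_REDUCE_PROFILE_PARAMS));
  }
}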

TestMRJobsWithProfiler.java

@@ -24,6 +24,7 @@ import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.junit.AfterClass;
 import org.junit.Assert;
 
 import org.apache.commons.logging.Log;
@@ -39,8 +40,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestMRJobsWithProfiler {
@@ -51,6 +51,8 @@ public class TestMRJobsWithProfiler {
   private static final EnumSet<RMAppState> TERMINAL_RM_APP_STATES =
     EnumSet.of(RMAppState.FINISHED, RMAppState.FAILED, RMAppState.KILLED);
 
+  private static final int PROFILED_TASK_ID = 1;
+
   private static MiniMRYarnCluster mrCluster;
 
   private static final Configuration CONF = new Configuration();
@@ -69,8 +71,8 @@ public class TestMRJobsWithProfiler {
   private static final Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar");
 
-  @Before
-  public void setup() throws InterruptedException, IOException {
+  @BeforeClass
+  public static void setup() throws InterruptedException, IOException {
 
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
       LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
@@ -79,7 +81,7 @@ public class TestMRJobsWithProfiler {
     }
 
     if (mrCluster == null) {
-      mrCluster = new MiniMRYarnCluster(getClass().getName());
+      mrCluster = new MiniMRYarnCluster(TestMRJobsWithProfiler.class.getName());
       mrCluster.init(CONF);
       mrCluster.start();
     }
@@ -90,8 +92,8 @@ public class TestMRJobsWithProfiler {
     localFs.setPermission(APP_JAR, new FsPermission("700"));
   }
 
-  @After
-  public void tearDown() {
+  @AfterClass
+  public static void tearDown() {
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
       LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
           + " not found. Not running test.");
@@ -103,10 +105,19 @@ public class TestMRJobsWithProfiler {
     }
   }
 
+  @Test (timeout = 150000)
+  public void testDefaultProfiler() throws Exception {
+    LOG.info("Starting testDefaultProfiler");
+    testProfilerInternal(true);
+  }
+
   @Test (timeout = 150000)
-  public void testProfiler() throws IOException, InterruptedException,
-      ClassNotFoundException {
+  public void testDifferentProfilers() throws Exception {
+    LOG.info("Starting testDefaultProfiler");
+    testProfilerInternal(false);
+  }
+
+  private void testProfilerInternal(boolean useDefault) throws Exception {
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
       LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
           + " not found. Not running test.");
@@ -117,18 +128,19 @@ public class TestMRJobsWithProfiler {
     final JobConf sleepConf = new JobConf(mrCluster.getConfig());
     sleepConf.setProfileEnabled(true);
-    // profile map split 1
-    sleepConf.setProfileTaskRange(true, "1");
-    // profile reduce of map output partitions 1
-    sleepConf.setProfileTaskRange(false, "1");
-    // use hprof for map to profile.out
-    sleepConf.set(MRJobConfig.TASK_MAP_PROFILE_PARAMS,
-        "-agentlib:hprof=cpu=times,heap=sites,force=n,thread=y,verbose=n,"
-        + "file=%s");
-    // use Xprof for reduce to stdout
-    sleepConf.set(MRJobConfig.TASK_REDUCE_PROFILE_PARAMS, "-Xprof");
+    sleepConf.setProfileTaskRange(true, String.valueOf(PROFILED_TASK_ID));
+    sleepConf.setProfileTaskRange(false, String.valueOf(PROFILED_TASK_ID));
+
+    if (!useDefault) {
+      // use hprof for map to profile.out
+      sleepConf.set(MRJobConfig.TASK_MAP_PROFILE_PARAMS,
+          "-agentlib:hprof=cpu=times,heap=sites,force=n,thread=y,verbose=n,"
+          + "file=%s");
+
+      // use Xprof for reduce to stdout
+      sleepConf.set(MRJobConfig.TASK_REDUCE_PROFILE_PARAMS, "-Xprof");
+    }
 
     sleepJob.setConf(sleepConf);
 
     // 2-map-2-reduce SleepJob
@@ -205,8 +217,8 @@ public class TestMRJobsWithProfiler {
           TaskLog.LogName.PROFILE.toString());
       final Path stdoutPath = new Path(dirEntry.getValue(),
           TaskLog.LogName.STDOUT.toString());
-      if (tid.getTaskType() == TaskType.MAP) {
-        if (tid.getTaskID().getId() == 1) {
+      if (useDefault || tid.getTaskType() == TaskType.MAP) {
+        if (tid.getTaskID().getId() == PROFILED_TASK_ID) {
           // verify profile.out
           final BufferedReader br = new BufferedReader(new InputStreamReader(
               localFs.open(profilePath)));
@@ -222,7 +234,8 @@ public class TestMRJobsWithProfiler {
       } else {
         Assert.assertFalse("hprof file should not exist",
             localFs.exists(profilePath));
-        if (tid.getTaskID().getId() == 1) {
+        if (tid.getTaskID().getId() == PROFILED_TASK_ID) {
+          // reducer is profiled with Xprof
           final BufferedReader br = new BufferedReader(new InputStreamReader(
               localFs.open(stdoutPath)));
           boolean flatProfFound = false;