From cea1c71087e886c3b1e548c3ae900e9a0e395477 Mon Sep 17 00:00:00 2001
From: Colin Patrick Mccabe
Date: Thu, 14 Jan 2016 11:02:34 -0800
Subject: [PATCH] HADOOP-8887. Use a Maven plugin to build the native code
 using CMake (cmccabe)

(cherry picked from commit b1ed28fa77cb2fab80c54f9dfeb5d8b7139eca34)

Conflicts:
	hadoop-common-project/hadoop-common/pom.xml
	hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
---
 BUILDING.txt                                  |   4 +
 .../hadoop-common/CHANGES.txt                 |   3 +
 hadoop-common-project/hadoop-common/pom.xml   |  48 +--
 .../hadoop-hdfs-native-client/pom.xml         |  35 +-
 .../plugin/cmakebuilder/CompileMojo.java      | 269 ++++++++++++
 .../maven/plugin/cmakebuilder/TestMojo.java   | 383 ++++++++++++++++++
 .../apache/hadoop/maven/plugin/util/Exec.java |  41 +-
 hadoop-tools/hadoop-pipes/pom.xml             |  64 +--
 .../hadoop-yarn-server-nodemanager/pom.xml    |  45 +-
 .../test/test-container-executor.c            |   7 +
 10 files changed, 775 insertions(+), 124 deletions(-)
 create mode 100644 hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
 create mode 100644 hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java

diff --git a/BUILDING.txt b/BUILDING.txt
index d8cd210c31b..3982d7f6078 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -175,6 +175,10 @@ Maven build goals:
   * -Dtest=,,....
   * -Dtest.exclude=
   * -Dtest.exclude.pattern=**/.java,**/.java
+  * To run all native unit tests, use: mvn test -Pnative -Dtest=allNative
+  * To run a specific native unit test, use: mvn test -Pnative -Dtest=
+  For example, to run test_bulk_crc32, you would use:
+  mvn test -Pnative -Dtest=test_bulk_crc32

----------------------------------------------------------------------------------
Building components separately

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 358c3618af3..d6df072cbb0 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -25,6 +25,9 @@ Release 2.9.0 - UNRELEASED
     HADOOP-12683. Add number of samples in last interval in snapshot of
     MutableStat. (Vikram Srivastava via kasha)

+    HADOOP-8887. Use a Maven plugin to build the native code using CMake
+    (cmccabe)
+
   BUG FIXES

    HADOOP-12655. TestHttpServer.testBindAddress bind port range is wider

diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 4bd125904ce..3cfcfa2fd89 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -581,39 +581,39 @@
-        org.apache.maven.plugins
-        maven-antrun-plugin
+        org.apache.hadoop
+        hadoop-maven-plugins
-            make
+            cmake-compile
             compile
-            run
+            cmake-compile
-
-
-
-
-
-
-
-
-
-
+            ${basedir}/src
+
+            ${project.build.directory}/native/javah
+            ${sun.arch.data.model}
+            ${require.bzip2}
+            ${require.snappy}
+            ${snappy.prefix}
+            ${snappy.lib}
+            ${snappy.include}
+            ${require.openssl}
+            ${openssl.prefix}
+            ${openssl.lib}
+            ${openssl.include}
+            ${extra.libhadoop.rpath}
+
-            native_tests
+            test_bulk_crc32
+            cmake-test
             test
-            run
-
-
-
-
-
-
-
+            ${project.build.directory}/native/test_bulk_crc32
+            1200
+            ${project.build.directory}/native-results

diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
index b578667bd60..fbc5efe99b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
@@ -186,25 +186,30 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+        org.apache.hadoop
+        hadoop-maven-plugins
+
+
+            cmake-compile
+            compile
+            cmake-compile
+
+            ${basedir}/src
+
+            ${project.build.directory}/native/javah
+            ${sun.arch.data.model}
+            ${require.fuse}
+
+            ${project.build.directory}
+
+
+
         org.apache.maven.plugins
         maven-antrun-plugin
-
-            make
-            compile
-            run
-
-
-
-
-
-
-
-
-
-
             native_tests
             test

diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
new file mode 100644
index 00000000000..e44bcf857fe
--- /dev/null
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.maven.plugin.cmakebuilder;
+
+import org.apache.hadoop.maven.plugin.util.Exec.OutputBufferThread;
+import org.apache.hadoop.maven.plugin.util.Exec;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.LifecyclePhase;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Goal which builds the native sources.
+ */
+@Mojo(name="cmake-compile", defaultPhase = LifecyclePhase.COMPILE)
+public class CompileMojo extends AbstractMojo {
+  private static int availableProcessors =
+      Runtime.getRuntime().availableProcessors();
+
+  /**
+   * Location of the build products.
+   */
+  @Parameter(defaultValue="${project.build.directory}/native")
+  private File output;
+
+  /**
+   * Location of the source files.
+   * This should be where the sources are checked in.
+   */
+  @Parameter(defaultValue="${basedir}/src/main/native", required=true)
+  private File source;
+
+  /**
+   * CMake build target.
+   */
+  @Parameter
+  private String target;
+
+  /**
+   * Environment variables to pass to CMake.
+   *
+   * Note that it is usually better to use a CMake variable than an environment
+   * variable. To quote the CMake FAQ:
+   *
+   * "One should avoid using environment variables for controlling the flow of
+   * CMake code (such as in IF commands). The build system generated by CMake
+   * may re-run CMake automatically when CMakeLists.txt files change. The
+   * environment in which this is executed is controlled by the build system and
+   * may not match that in which CMake was originally run. If you want to
+   * control build settings on the CMake command line, you need to use cache
+   * variables set with the -D option. The settings will be saved in
+   * CMakeCache.txt so that they don't have to be repeated every time CMake is
+   * run on the same build tree."
+   */
+  @Parameter
+  private Map<String, String> env;
+
+  /**
+   * CMake cached variables to set.
+   */
+  @Parameter
+  private Map<String, String> vars;
+
+  // TODO: support Windows
+  private static void validatePlatform() throws MojoExecutionException {
+    if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
+      throw new MojoExecutionException("CMakeBuilder does not yet support " +
+          "the Windows platform.");
+    }
+  }
+
+  public void execute() throws MojoExecutionException {
+    long start = System.nanoTime();
+    validatePlatform();
+    runCMake();
+    runMake();
+    runMake(); // The second make is a workaround for HADOOP-9215. It can be
+               // removed when cmake 2.6 is no longer supported.
+    long end = System.nanoTime();
+    getLog().info("cmake compilation finished successfully in " +
+        TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
+        " millisecond(s).");
+  }
+
+  /**
+   * Validate that source parameters look sane.
+   */
+  static void validateSourceParams(File source, File output)
+      throws MojoExecutionException {
+    String cOutput = null, cSource = null;
+    try {
+      cOutput = output.getCanonicalPath();
+    } catch (IOException e) {
+      throw new MojoExecutionException("error getting canonical path " +
+          "for output", e);
+    }
+    try {
+      cSource = source.getCanonicalPath();
+    } catch (IOException e) {
+      throw new MojoExecutionException("error getting canonical path " +
+          "for source", e);
+    }
+
+    // This doesn't catch all the bad cases-- we could be following symlinks or
+    // hardlinks, etc.  However, this will usually catch a common mistake.
+    if (cSource.startsWith(cOutput)) {
+      throw new MojoExecutionException("The source directory must not be " +
+          "inside the output directory (it would be destroyed by " +
+          "'mvn clean')");
+    }
+  }
+
+  public void runCMake() throws MojoExecutionException {
+    validatePlatform();
+    validateSourceParams(source, output);
+
+    if (output.mkdirs()) {
+      getLog().info("mkdirs '" + output + "'");
+    }
+    List<String> cmd = new LinkedList<String>();
+    cmd.add("cmake");
+    cmd.add(source.getAbsolutePath());
+    for (Map.Entry<String, String> entry : vars.entrySet()) {
+      if ((entry.getValue() != null) && (!entry.getValue().equals(""))) {
+        cmd.add("-D" + entry.getKey() + "=" + entry.getValue());
+      }
+    }
+    cmd.add("-G");
+    cmd.add("Unix Makefiles");
+    String prefix = "";
+    StringBuilder bld = new StringBuilder();
+    for (String c : cmd) {
+      bld.append(prefix).append(c);
+      prefix = " ";
+    }
+    getLog().info("Running " + bld.toString());
+    getLog().info("with extra environment variables " + Exec.envToString(env));
+    ProcessBuilder pb = new ProcessBuilder(cmd);
+    pb.directory(output);
+    pb.redirectErrorStream(true);
+    Exec.addEnvironment(pb, env);
+    Process proc = null;
+    OutputBufferThread outThread = null;
+    int retCode = -1;
+    try {
+      proc = pb.start();
+      outThread = new OutputBufferThread(proc.getInputStream());
+      outThread.start();
+
+      retCode = proc.waitFor();
+      if (retCode != 0) {
+        throw new MojoExecutionException("CMake failed with error code " +
+            retCode);
+      }
+    } catch (IOException e) {
+      throw new MojoExecutionException("Error executing CMake", e);
+    } catch (InterruptedException e) {
+      throw new MojoExecutionException("Interrupted while waiting for " +
+          "CMake process", e);
+    } finally {
+      if (proc != null) {
+        proc.destroy();
+      }
+      if (outThread != null) {
+        try {
+          outThread.interrupt();
+          outThread.join();
+        } catch (InterruptedException e) {
+          getLog().error("Interrupted while joining output thread", e);
+        }
+        if (retCode != 0) {
+          for (String line : outThread.getOutput()) {
+            getLog().warn(line);
+          }
+        }
+      }
+    }
+  }
+
+  public void runMake() throws MojoExecutionException {
+    List<String> cmd = new LinkedList<String>();
+    cmd.add("make");
+    // TODO: it would be nice to determine the number of CPUs and set the
+    // parallelism to that.  It requires some platform-specific logic, though.
+    cmd.add("-j");
+    cmd.add(String.valueOf(availableProcessors));
+    cmd.add("VERBOSE=1");
+    if (target != null) {
+      cmd.add(target);
+    }
+    StringBuilder bld = new StringBuilder();
+    String prefix = "";
+    for (String c : cmd) {
+      bld.append(prefix).append(c);
+      prefix = " ";
+    }
+    getLog().info("Running " + bld.toString());
+    ProcessBuilder pb = new ProcessBuilder(cmd);
+    pb.directory(output);
+    Process proc = null;
+    int retCode = -1;
+    OutputBufferThread stdoutThread = null, stderrThread = null;
+    try {
+      proc = pb.start();
+      stdoutThread = new OutputBufferThread(proc.getInputStream());
+      stderrThread = new OutputBufferThread(proc.getErrorStream());
+      stdoutThread.start();
+      stderrThread.start();
+      retCode = proc.waitFor();
+      if (retCode != 0) {
+        throw new MojoExecutionException("make failed with error code " +
+            retCode);
+      }
+    } catch (InterruptedException e) {
+      throw new MojoExecutionException("Interrupted during Process#waitFor", e);
+    } catch (IOException e) {
+      throw new MojoExecutionException("Error executing make", e);
+    } finally {
+      if (stdoutThread != null) {
+        try {
+          stdoutThread.join();
+        } catch (InterruptedException e) {
+          getLog().error("Interrupted while joining stdoutThread", e);
+        }
+        if (retCode != 0) {
+          for (String line: stdoutThread.getOutput()) {
+            getLog().warn(line);
+          }
+        }
+      }
+      if (stderrThread != null) {
+        try {
+          stderrThread.join();
+        } catch (InterruptedException e) {
+          getLog().error("Interrupted while joining stderrThread", e);
+        }
+        // We always print stderr, since it contains the compiler warning
+        // messages.  These are interesting even if compilation succeeded.
+        for (String line: stderrThread.getOutput()) {
+          getLog().warn(line);
+        }
+      }
+      if (proc != null) proc.destroy();
+    }
+  }
+}

diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
new file mode 100644
index 00000000000..fa7176b2166
--- /dev/null
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
@@ -0,0 +1,383 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.maven.plugin.cmakebuilder;
+
+import org.apache.hadoop.maven.plugin.util.Exec;
+import org.apache.maven.execution.MavenSession;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.LifecyclePhase;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Goal which runs a native unit test.
+ */
+@Mojo(name="cmake-test", defaultPhase = LifecyclePhase.TEST)
+public class TestMojo extends AbstractMojo {
+  /**
+   * A value for -Dtest= that runs all native tests.
+   */
+  private final static String ALL_NATIVE = "allNative";
+
+  /**
+   * Location of the binary to run.
+   */
+  @Parameter(required=true)
+  private File binary;
+
+  /**
+   * Name of this test.
+   *
+   * Defaults to the basename of the binary.  So if your binary is /foo/bar/baz,
+   * this will default to 'baz.'
+   */
+  @Parameter
+  private String testName;
+
+  /**
+   * Environment variables to pass to the binary.
+   */
+  @Parameter
+  private Map<String, String> env;
+
+  /**
+   * Arguments to pass to the binary.
+   */
+  @Parameter
+  private List<String> args = new LinkedList<String>();
+
+  /**
+   * Number of seconds to wait before declaring the test failed.
+   */
+  @Parameter(defaultValue="600")
+  private int timeout;
+
+  /**
+   * Path to results directory.
+   */
+  @Parameter(defaultValue="native-results")
+  private File results;
+
+  /**
+   * A list of preconditions which must be true for this test to be run.
+   */
+  @Parameter
+  private Map<String, String> preconditions = new HashMap<String, String>();
+
+  /**
+   * If true, pass over the test without an error if the binary is missing.
+   */
+  @Parameter(defaultValue="false")
+  private boolean skipIfMissing;
+
+  /**
+   * What result to expect from the test.
+   *
+   * Can be either "success", "failure", or "any".
+   */
+  @Parameter(defaultValue="success")
+  private String expectedResult;
+
+  /**
+   * The Maven Session Object.
+   */
+  @Parameter(defaultValue="${session}", readonly=true, required=true)
+  private MavenSession session;
+
+  // TODO: support Windows
+  private static void validatePlatform() throws MojoExecutionException {
+    if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
+      throw new MojoExecutionException("CMakeBuilder does not yet support " +
+          "the Windows platform.");
+    }
+  }
+
+  /**
+   * The test thread waits for the process to terminate.
+   *
+   * Since Process#waitFor doesn't take a timeout argument, we simulate one by
+   * interrupting this thread after a certain amount of time has elapsed.
+   */
+  private static class TestThread extends Thread {
+    private Process proc;
+    private int retCode = -1;
+
+    public TestThread(Process proc) {
+      this.proc = proc;
+    }
+
+    public void run() {
+      try {
+        retCode = proc.waitFor();
+      } catch (InterruptedException e) {
+        retCode = -1;
+      }
+    }
+
+    public int retCode() {
+      return retCode;
+    }
+  }
+
+  /**
+   * Write to the status file.
+   *
+   * The status file will contain a string describing the exit status of the
+   * test.  It will be SUCCESS if the test returned success (return code 0), a
+   * numerical code if it returned a non-zero status, or IN_PROGRESS or
+   * TIMED_OUT.
+   */
+  private void writeStatusFile(String status) throws IOException {
+    FileOutputStream fos = new FileOutputStream(new File(results,
+        testName + ".pstatus"));
+    BufferedWriter out = null;
+    try {
+      out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
+      out.write(status + "\n");
+    } finally {
+      if (out != null) {
+        out.close();
+      } else {
+        fos.close();
+      }
+    }
+  }
+
+  private static boolean isTruthy(String str) {
+    if (str == null)
+      return false;
+    if (str.equalsIgnoreCase(""))
+      return false;
+    if (str.equalsIgnoreCase("false"))
+      return false;
+    if (str.equalsIgnoreCase("no"))
+      return false;
+    if (str.equalsIgnoreCase("off"))
+      return false;
+    if (str.equalsIgnoreCase("disable"))
+      return false;
+    return true;
+  }
+
+  final static private String VALID_PRECONDITION_TYPES_STR =
+      "Valid precondition types are \"and\", \"andNot\"";
+
+  /**
+   * Validate the parameters that the user has passed.
+   * @throws MojoExecutionException
+   */
+  private void validateParameters() throws MojoExecutionException {
+    if (!(expectedResult.equals("success") ||
+        expectedResult.equals("failure") ||
+        expectedResult.equals("any"))) {
+      throw new MojoExecutionException("expectedResult must be either " +
+          "success, failure, or any");
+    }
+  }
+
+  private boolean shouldRunTest() throws MojoExecutionException {
+    // Were we told to skip all tests?
+    String skipTests = session.
+        getExecutionProperties().getProperty("skipTests");
+    if (isTruthy(skipTests)) {
+      getLog().info("skipTests is in effect for test " + testName);
+      return false;
+    }
+    // Does the binary exist?  If not, we shouldn't try to run it.
+    if (!binary.exists()) {
+      if (skipIfMissing) {
+        getLog().info("Skipping missing test " + testName);
+        return false;
+      } else {
+        throw new MojoExecutionException("Test " + binary +
+            " was not built!  (File does not exist.)");
+      }
+    }
+    // If there is an explicit list of tests to run, it should include this
+    // test.
+    String testProp = session.
+        getExecutionProperties().getProperty("test");
+    if (testProp != null) {
+      String testPropArr[] = testProp.split(",");
+      boolean found = false;
+      for (String test : testPropArr) {
+        if (test.equals(ALL_NATIVE)) {
+          found = true;
+          break;
+        }
+        if (test.equals(testName)) {
+          found = true;
+          break;
+        }
+      }
+      if (!found) {
+        getLog().debug("did not find test '" + testName + "' in " +
+            "list " + testProp);
+        return false;
+      }
+    }
+    // Are all the preconditions satisfied?
+    if (preconditions != null) {
+      int idx = 1;
+      for (Map.Entry<String, String> entry : preconditions.entrySet()) {
+        String key = entry.getKey();
+        String val = entry.getValue();
+        if (key == null) {
+          throw new MojoExecutionException("NULL is not a valid " +
+              "precondition type.  " + VALID_PRECONDITION_TYPES_STR);
+        }
+        if (key.equals("and")) {
+          if (!isTruthy(val)) {
+            getLog().info("Skipping test " + testName +
+                " because precondition number " + idx + " was not met.");
+            return false;
+          }
+        } else if (key.equals("andNot")) {
+          if (isTruthy(val)) {
+            getLog().info("Skipping test " + testName +
+                " because negative precondition number " + idx +
+                " was met.");
+            return false;
+          }
+        } else {
+          throw new MojoExecutionException(key + " is not a valid " +
+              "precondition type.  " + VALID_PRECONDITION_TYPES_STR);
+        }
+        idx++;
+      }
+    }
+    // OK, we should run this.
+    return true;
+  }
+
+  public void execute() throws MojoExecutionException {
+    if (testName == null) {
+      testName = binary.getName();
+    }
+    validatePlatform();
+    validateParameters();
+    if (!shouldRunTest()) {
+      return;
+    }
+    if (!results.isDirectory()) {
+      if (!results.mkdirs()) {
+        throw new MojoExecutionException("Failed to create " +
+            "output directory '" + results + "'!");
+      }
+    }
+    List<String> cmd = new LinkedList<String>();
+    cmd.add(binary.getAbsolutePath());
+
+    getLog().info("-------------------------------------------------------");
+    getLog().info(" C M A K E B U I L D E R   T E S T");
+    getLog().info("-------------------------------------------------------");
+    StringBuilder bld = new StringBuilder();
+    bld.append(testName).append(": running ");
+    bld.append(binary.getAbsolutePath());
+    for (String entry : args) {
+      cmd.add(entry);
+      bld.append(" ").append(entry);
+    }
+    getLog().info(bld.toString());
+    ProcessBuilder pb = new ProcessBuilder(cmd);
+    Exec.addEnvironment(pb, env);
+    pb.redirectError(new File(results, testName + ".stderr"));
+    pb.redirectOutput(new File(results, testName + ".stdout"));
+    getLog().info("with extra environment variables " + Exec.envToString(env));
+    Process proc = null;
+    TestThread testThread = null;
+    int retCode = -1;
+    String status = "IN_PROGRESS";
+    try {
+      writeStatusFile(status);
+    } catch (IOException e) {
+      throw new MojoExecutionException("Error writing the status file", e);
+    }
+    long start = System.nanoTime();
+    try {
+      proc = pb.start();
+      testThread = new TestThread(proc);
+      testThread.start();
+      testThread.join(timeout * 1000);
+      if (!testThread.isAlive()) {
+        retCode = testThread.retCode();
+        testThread = null;
+        proc = null;
+      }
+    } catch (IOException e) {
+      throw new MojoExecutionException("IOException while executing the test " +
+          testName, e);
+    } catch (InterruptedException e) {
+      throw new MojoExecutionException("Interrupted while executing " +
+          "the test " + testName, e);
+    } finally {
+      if (testThread != null) {
+        // If the test thread didn't exit yet, that means the timeout expired.
+        testThread.interrupt();
+        try {
+          testThread.join();
+        } catch (InterruptedException e) {
+          getLog().error("Interrupted while waiting for testThread", e);
+        }
+        status = "TIMED_OUT";
+      } else if (retCode == 0) {
+        status = "SUCCESS";
+      } else {
+        status = "ERROR CODE " + String.valueOf(retCode);
+      }
+      try {
+        writeStatusFile(status);
+      } catch (Exception e) {
+        getLog().error("failed to write status file!", e);
+      }
+      if (proc != null) {
+        proc.destroy();
+      }
+    }
+    long end = System.nanoTime();
+    getLog().info("STATUS: " + status + " after " +
+        TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
+        " millisecond(s).");
+    getLog().info("-------------------------------------------------------");
+    if (status.equals("TIMED_OUT")) {
+      if (expectedResult.equals("success")) {
+        throw new MojoExecutionException("Test " + binary +
+            " timed out after " + timeout + " seconds!");
+      }
+    } else if (!status.equals("SUCCESS")) {
+      if (expectedResult.equals("success")) {
+        throw new MojoExecutionException("Test " + binary +
+            " returned " + status);
+      }
+    } else if (expectedResult.equals("failure")) {
+      throw new MojoExecutionException("Test " + binary +
+          " succeeded, but we expected failure!");
+    }
+  }
+}

diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
index ce3543cd418..b0fa3ab069e 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
@@ -22,6 +22,7 @@ import java.io.InputStreamReader;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 /**
  * Exec is a helper class for executing an external process from a mojo.
@@ -93,7 +94,7 @@ public class Exec {
    * OutputBufferThread is a background thread for consuming and storing output
    * of the external process.
    */
-  private static class OutputBufferThread extends Thread {
+  public static class OutputBufferThread extends Thread {
     private List<String> output;
     private BufferedReader reader;
 
@@ -134,4 +135,42 @@ public class Exec {
       return output;
     }
   }
+
+  /**
+   * Add environment variables to a ProcessBuilder.
+   */
+  public static void addEnvironment(ProcessBuilder pb,
+      Map<String, String> env) {
+    if (env == null) {
+      return;
+    }
+    Map<String, String> processEnv = pb.environment();
+    for (Map.Entry<String, String> entry : env.entrySet()) {
+      String val = entry.getValue();
+      if (val == null) {
+        val = "";
+      }
+      processEnv.put(entry.getKey(), val);
+    }
+  }
+
+  /**
+   * Pretty-print the environment to a StringBuilder.
+   */
+  public static String envToString(Map<String, String> env) {
+    StringBuilder bld = new StringBuilder();
+    bld.append("{");
+    if (env != null) {
+      for (Map.Entry<String, String> entry : env.entrySet()) {
+        String val = entry.getValue();
+        if (val == null) {
+          val = "";
+        }
+        bld.append("\n  ").append(entry.getKey()).
+            append(" = '").append(val).append("'\n");
+      }
+    }
+    bld.append("}");
+    return bld.toString();
+  }
 }

diff --git a/hadoop-tools/hadoop-pipes/pom.xml b/hadoop-tools/hadoop-pipes/pom.xml
index a40da630e4c..cc3532ce1ab 100644
--- a/hadoop-tools/hadoop-pipes/pom.xml
+++ b/hadoop-tools/hadoop-pipes/pom.xml
@@ -43,72 +43,24 @@
-        org.apache.maven.plugins
-        maven-antrun-plugin
+        org.apache.hadoop
+        hadoop-maven-plugins
-            make
+            cmake-compile
             compile
-            run
+            cmake-compile
-
-
-
-
-
-
-
-
-
-
-
+            ${basedir}/src
+
+            ${sun.arch.data.model}
+
-
-
-

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 4ec34743e2c..04c3f841f65 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -172,43 +172,32 @@
-        org.apache.maven.plugins
-        maven-antrun-plugin
-
-            false
-
+        org.apache.hadoop
+        hadoop-maven-plugins
-            make
+            cmake-compile
             compile
-            run
+            cmake-compile
-
-
-
-
-
-
-
-
-
-
-
-
+            ${basedir}/src
+
+            ${container-executor.conf.dir}
+            ${sun.arch.data.model}
+
+
+            ${container-executor.additional_cflags}
+
-            native_tests
+            test-container-executor
+            cmake-test
             test
-
-
-
-
-
-
-
+            ${project.build.directory}/native/target/usr/local/bin/test-container-executor
+            300
+            ${project.build.directory}/native-results

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
index 3db75eab072..6d10509bf06 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
@@ -470,6 +470,13 @@ void run_test_in_child(const char* test_name, void (*func)()) {
 }
 
 void test_signal_container() {
+  sigset_t set;
+
+  // unblock SIGQUIT
+  sigemptyset(&set);
+  sigaddset(&set, SIGQUIT);
+  sigprocmask(SIG_UNBLOCK, &set, NULL);
+
   printf("\nTesting signal_container\n");
   fflush(stdout);
   fflush(stderr);
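Editor's note on the timeout technique used by TestMojo above: Process#waitFor takes no
timeout argument, so the patch starts a helper thread that blocks in waitFor and then joins
that thread with a timeout. The standalone sketch below illustrates the same pattern; it is
not part of the patch, and the class name and the "sleep 5" example payload are hypothetical.

    import java.util.concurrent.TimeUnit;

    public class WaitForWithTimeout {
      // Helper thread that blocks in Process#waitFor, mirroring TestMojo's TestThread.
      private static class WaiterThread extends Thread {
        private final Process proc;
        private volatile int retCode = -1;

        WaiterThread(Process proc) {
          this.proc = proc;
        }

        @Override
        public void run() {
          try {
            retCode = proc.waitFor();   // blocks until the child process exits
          } catch (InterruptedException e) {
            retCode = -1;               // interrupted before the process exited
          }
        }

        int retCode() {
          return retCode;
        }
      }

      public static void main(String[] args) throws Exception {
        // Example payload: a process that runs longer than the 2-second timeout.
        Process proc = new ProcessBuilder("sleep", "5").start();
        WaiterThread waiter = new WaiterThread(proc);
        waiter.start();
        waiter.join(TimeUnit.SECONDS.toMillis(2));   // wait at most 2 seconds
        if (waiter.isAlive()) {
          // Timeout expired: stop the waiter and kill the child, as TestMojo does.
          waiter.interrupt();
          proc.destroy();
          waiter.join();
          System.out.println("TIMED_OUT");
        } else {
          System.out.println("exit code " + waiter.retCode());
        }
      }
    }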