HADOOP-8887. Use a Maven plugin to build the native code using CMake (cmccabe)

(cherry picked from commit b1ed28fa77)

Conflicts:
	hadoop-common-project/hadoop-common/pom.xml
	hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
This commit is contained in:
Colin Patrick Mccabe 2016-01-14 11:02:34 -08:00
parent f0923819c3
commit cea1c71087
10 changed files with 775 additions and 124 deletions

View File

@ -175,6 +175,10 @@ Maven build goals:
* -Dtest=<TESTCLASSNAME>,<TESTCLASSNAME#METHODNAME>,.... * -Dtest=<TESTCLASSNAME>,<TESTCLASSNAME#METHODNAME>,....
* -Dtest.exclude=<TESTCLASSNAME> * -Dtest.exclude=<TESTCLASSNAME>
* -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
* To run all native unit tests, use: mvn test -Pnative -Dtest=allNative
* To run a specific native unit test, use: mvn test -Pnative -Dtest=<test>
For example, to run test_bulk_crc32, you would use:
mvn test -Pnative -Dtest=test_bulk_crc32
---------------------------------------------------------------------------------- ----------------------------------------------------------------------------------
Building components separately Building components separately

View File

@ -25,6 +25,9 @@ Release 2.9.0 - UNRELEASED
HADOOP-12683. Add number of samples in last interval in snapshot of HADOOP-12683. Add number of samples in last interval in snapshot of
MutableStat. (Vikram Srivastava via kasha) MutableStat. (Vikram Srivastava via kasha)
HADOOP-8887. Use a Maven plugin to build the native code using CMake
(cmccabe)
BUG FIXES BUG FIXES
HADOOP-12655. TestHttpServer.testBindAddress bind port range is wider HADOOP-12655. TestHttpServer.testBindAddress bind port range is wider

View File

@ -581,39 +581,39 @@
</executions> </executions>
</plugin> </plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>hadoop-maven-plugins</artifactId>
<executions> <executions>
<execution> <execution>
<id>make</id> <id>cmake-compile</id>
<phase>compile</phase> <phase>compile</phase>
<goals><goal>run</goal></goals> <goals><goal>cmake-compile</goal></goals>
<configuration> <configuration>
<target> <source>${basedir}/src</source>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true"> <vars>
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_BZIP2=${require.bzip2} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include} -DREQUIRE_OPENSSL=${require.openssl} -DCUSTOM_OPENSSL_PREFIX=${openssl.prefix} -DCUSTOM_OPENSSL_LIB=${openssl.lib} -DCUSTOM_OPENSSL_INCLUDE=${openssl.include} -DEXTRA_LIBHADOOP_RPATH=${extra.libhadoop.rpath}"/> <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
</exec> <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"> <REQUIRE_BZIP2>${require.bzip2}</REQUIRE_BZIP2>
<arg line="VERBOSE=1"/> <REQUIRE_SNAPPY>${require.snappy}</REQUIRE_SNAPPY>
</exec> <CUSTOM_SNAPPY_PREFIX>${snappy.prefix}</CUSTOM_SNAPPY_PREFIX>
<!-- The second make is a workaround for HADOOP-9215. It can <CUSTOM_SNAPPY_LIB>${snappy.lib} </CUSTOM_SNAPPY_LIB>
be removed when version 2.6 of cmake is no longer supported . --> <CUSTOM_SNAPPY_INCLUDE>${snappy.include} </CUSTOM_SNAPPY_INCLUDE>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec> <REQUIRE_OPENSSL>${require.openssl} </REQUIRE_OPENSSL>
</target> <CUSTOM_OPENSSL_PREFIX>${openssl.prefix} </CUSTOM_OPENSSL_PREFIX>
<CUSTOM_OPENSSL_LIB>${openssl.lib} </CUSTOM_OPENSSL_LIB>
<CUSTOM_OPENSSL_INCLUDE>${openssl.include} </CUSTOM_OPENSSL_INCLUDE>
<EXTRA_LIBHADOOP_RPATH>${extra.libhadoop.rpath}</EXTRA_LIBHADOOP_RPATH>
</vars>
</configuration> </configuration>
</execution> </execution>
<execution> <execution>
<id>native_tests</id> <id>test_bulk_crc32</id>
<goals><goal>cmake-test</goal></goals>
<phase>test</phase> <phase>test</phase>
<goals><goal>run</goal></goals>
<configuration> <configuration>
<target> <binary>${project.build.directory}/native/test_bulk_crc32</binary>
<exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native"> <timeout>1200</timeout>
<arg value="-c"/> <results>${project.build.directory}/native-results</results>
<arg value="[ x$SKIPTESTS = xtrue ] || ${project.build.directory}/native/test_bulk_crc32"/>
<env key="SKIPTESTS" value="${skipTests}"/>
</exec>
</target>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>

View File

@ -186,25 +186,30 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</properties> </properties>
<build> <build>
<plugins> <plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>cmake-compile</id>
<phase>compile</phase>
<goals><goal>cmake-compile</goal></goals>
<configuration>
<source>${basedir}/src</source>
<vars>
<GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
<JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
<REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
</vars>
<output>${project.build.directory}</output>
</configuration>
</execution>
</executions>
</plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>maven-antrun-plugin</artifactId>
<executions> <executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}"/>
<exec executable="cmake" dir="${project.build.directory}" failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_FUSE=${require.fuse}"/>
</exec>
<exec executable="make" dir="${project.build.directory}" failonerror="true">
</exec>
</target>
</configuration>
</execution>
<execution> <execution>
<id>native_tests</id> <id>native_tests</id>
<phase>test</phase> <phase>test</phase>

View File

@ -0,0 +1,269 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.maven.plugin.cmakebuilder;
import org.apache.hadoop.maven.plugin.util.Exec.OutputBufferThread;
import org.apache.hadoop.maven.plugin.util.Exec;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.concurrent.TimeUnit;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Goal which builds the native sources by running cmake followed by make.
 */
@Mojo(name="cmake-compile", defaultPhase = LifecyclePhase.COMPILE)
public class CompileMojo extends AbstractMojo {
  // Parallelism passed to make via -j.
  private static int availableProcessors =
      Runtime.getRuntime().availableProcessors();

  /**
   * Location of the build products.
   */
  @Parameter(defaultValue="${project.build.directory}/native")
  private File output;

  /**
   * Location of the source files.
   * This should be where the sources are checked in.
   */
  @Parameter(defaultValue="${basedir}/src/main/native", required=true)
  private File source;

  /**
   * CMake build target.  If unset, the default target is built.
   */
  @Parameter
  private String target;

  /**
   * Environment variables to pass to CMake.
   *
   * Note that it is usually better to use a CMake variable than an environment
   * variable.  To quote the CMake FAQ:
   *
   * "One should avoid using environment variables for controlling the flow of
   * CMake code (such as in IF commands).  The build system generated by CMake
   * may re-run CMake automatically when CMakeLists.txt files change.  The
   * environment in which this is executed is controlled by the build system and
   * may not match that in which CMake was originally run.  If you want to
   * control build settings on the CMake command line, you need to use cache
   * variables set with the -D option.  The settings will be saved in
   * CMakeCache.txt so that they don't have to be repeated every time CMake is
   * run on the same build tree."
   */
  @Parameter
  private Map<String, String> env;

  /**
   * CMake cached variables to set with -D.  May be left unconfigured.
   */
  @Parameter
  private Map<String, String> vars;

  // TODO: support Windows
  private static void validatePlatform() throws MojoExecutionException {
    if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
      throw new MojoExecutionException("CMakeBuilder does not yet support " +
          "the Windows platform.");
    }
  }

  /**
   * Run cmake, then make twice, logging the total compilation time.
   *
   * @throws MojoExecutionException if cmake or make fails.
   */
  public void execute() throws MojoExecutionException {
    long start = System.nanoTime();
    validatePlatform();
    runCMake();
    runMake();
    runMake(); // The second make is a workaround for HADOOP-9215.  It can be
               // removed when cmake 2.6 is no longer supported.
    long end = System.nanoTime();
    getLog().info("cmake compilation finished successfully in " +
        TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
        " millisecond(s).");
  }

  /**
   * Validate that source parameters look sane.
   */
  static void validateSourceParams(File source, File output)
      throws MojoExecutionException {
    String cOutput = null, cSource = null;
    try {
      cOutput = output.getCanonicalPath();
    } catch (IOException e) {
      throw new MojoExecutionException("error getting canonical path " +
          "for output", e);
    }
    try {
      cSource = source.getCanonicalPath();
    } catch (IOException e) {
      throw new MojoExecutionException("error getting canonical path " +
          "for source", e);
    }
    // This doesn't catch all the bad cases-- we could be following symlinks or
    // hardlinks, etc.  However, this will usually catch a common mistake.
    if (cSource.startsWith(cOutput)) {
      throw new MojoExecutionException("The source directory must not be " +
          "inside the output directory (it would be destroyed by " +
          "'mvn clean')");
    }
  }

  /**
   * Configure the build tree by running cmake inside the output directory.
   *
   * @throws MojoExecutionException if cmake cannot be started, is
   *                                interrupted, or exits with nonzero status.
   */
  public void runCMake() throws MojoExecutionException {
    validatePlatform();
    validateSourceParams(source, output);

    if (output.mkdirs()) {
      getLog().info("mkdirs '" + output + "'");
    }
    List<String> cmd = new LinkedList<String>();
    cmd.add("cmake");
    cmd.add(source.getAbsolutePath());
    // vars is an optional plugin parameter and may be null when the pom does
    // not configure any cached variables; guard against an NPE here.
    if (vars != null) {
      for (Map.Entry<String, String> entry : vars.entrySet()) {
        if ((entry.getValue() != null) && (!entry.getValue().equals(""))) {
          cmd.add("-D" + entry.getKey() + "=" + entry.getValue());
        }
      }
    }
    cmd.add("-G");
    cmd.add("Unix Makefiles");
    String prefix = "";
    StringBuilder bld = new StringBuilder();
    for (String c : cmd) {
      bld.append(prefix).append(c);
      prefix = " ";
    }
    getLog().info("Running " + bld.toString());
    getLog().info("with extra environment variables " + Exec.envToString(env));
    ProcessBuilder pb = new ProcessBuilder(cmd);
    pb.directory(output);
    pb.redirectErrorStream(true);
    Exec.addEnvironment(pb, env);
    Process proc = null;
    OutputBufferThread outThread = null;
    int retCode = -1;
    try {
      proc = pb.start();
      // cmake's stdout and stderr are merged (redirectErrorStream above), so
      // a single reader thread consumes everything.
      outThread = new OutputBufferThread(proc.getInputStream());
      outThread.start();
      retCode = proc.waitFor();
      if (retCode != 0) {
        throw new MojoExecutionException("CMake failed with error code " +
            retCode);
      }
    } catch (IOException e) {
      throw new MojoExecutionException("Error executing CMake", e);
    } catch (InterruptedException e) {
      throw new MojoExecutionException("Interrupted while waiting for " +
          "CMake process", e);
    } finally {
      if (proc != null) {
        proc.destroy();
      }
      if (outThread != null) {
        try {
          outThread.interrupt();
          outThread.join();
        } catch (InterruptedException e) {
          getLog().error("Interrupted while joining output thread", e);
        }
        // Only surface the captured output when cmake failed; on success it
        // is just noise.
        if (retCode != 0) {
          for (String line : outThread.getOutput()) {
            getLog().warn(line);
          }
        }
      }
    }
  }

  /**
   * Build the configured tree by running make in the output directory.
   *
   * @throws MojoExecutionException if make cannot be started, is
   *                                interrupted, or exits with nonzero status.
   */
  public void runMake() throws MojoExecutionException {
    List<String> cmd = new LinkedList<String>();
    cmd.add("make");
    // Parallelize the build across all available CPUs.
    cmd.add("-j");
    cmd.add(String.valueOf(availableProcessors));
    cmd.add("VERBOSE=1");
    if (target != null) {
      cmd.add(target);
    }
    StringBuilder bld = new StringBuilder();
    String prefix = "";
    for (String c : cmd) {
      bld.append(prefix).append(c);
      prefix = " ";
    }
    getLog().info("Running " + bld.toString());
    ProcessBuilder pb = new ProcessBuilder(cmd);
    pb.directory(output);
    Process proc = null;
    int retCode = -1;
    OutputBufferThread stdoutThread = null, stderrThread = null;
    try {
      proc = pb.start();
      stdoutThread = new OutputBufferThread(proc.getInputStream());
      stderrThread = new OutputBufferThread(proc.getErrorStream());
      stdoutThread.start();
      stderrThread.start();
      retCode = proc.waitFor();
      if (retCode != 0) {
        throw new MojoExecutionException("make failed with error code " +
            retCode);
      }
    } catch (InterruptedException e) {
      throw new MojoExecutionException("Interrupted during Process#waitFor", e);
    } catch (IOException e) {
      throw new MojoExecutionException("Error executing make", e);
    } finally {
      if (stdoutThread != null) {
        try {
          stdoutThread.join();
        } catch (InterruptedException e) {
          getLog().error("Interrupted while joining stdoutThread", e);
        }
        if (retCode != 0) {
          for (String line: stdoutThread.getOutput()) {
            getLog().warn(line);
          }
        }
      }
      if (stderrThread != null) {
        try {
          stderrThread.join();
        } catch (InterruptedException e) {
          getLog().error("Interrupted while joining stderrThread", e);
        }
        // We always print stderr, since it contains the compiler warning
        // messages.  These are interesting even if compilation succeeded.
        for (String line: stderrThread.getOutput()) {
          getLog().warn(line);
        }
      }
      if (proc != null) proc.destroy();
    }
  }
}

View File

@ -0,0 +1,383 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.maven.plugin.cmakebuilder;
import org.apache.hadoop.maven.plugin.util.Exec;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.concurrent.TimeUnit;
import java.util.LinkedList;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
/**
 * Goal which runs a native unit test.
 */
@Mojo(name="cmake-test", defaultPhase = LifecyclePhase.TEST)
public class TestMojo extends AbstractMojo {
  /**
   * A value for -Dtest= that runs all native tests.
   */
  private final static String ALL_NATIVE = "allNative";

  /**
   * Location of the binary to run.
   */
  @Parameter(required=true)
  private File binary;

  /**
   * Name of this test.
   *
   * Defaults to the basename of the binary.  So if your binary is /foo/bar/baz,
   * this will default to 'baz.'
   */
  @Parameter
  private String testName;

  /**
   * Environment variables to pass to the binary.
   */
  @Parameter
  private Map<String, String> env;

  /**
   * Arguments to pass to the binary.
   */
  @Parameter
  private List<String> args = new LinkedList<String>();

  /**
   * Number of seconds to wait before declaring the test failed.
   */
  @Parameter(defaultValue="600")
  private int timeout;

  /**
   * Path to results directory.
   */
  @Parameter(defaultValue="native-results")
  private File results;

  /**
   * A list of preconditions which must be true for this test to be run.
   */
  @Parameter
  private Map<String, String> preconditions = new HashMap<String, String>();

  /**
   * If true, pass over the test without an error if the binary is missing.
   */
  @Parameter(defaultValue="false")
  private boolean skipIfMissing;

  /**
   * What result to expect from the test
   *
   * Can be either "success", "failure", or "any".
   */
  @Parameter(defaultValue="success")
  private String expectedResult;

  /**
   * The Maven Session Object
   */
  @Parameter(defaultValue="${session}", readonly=true, required=true)
  private MavenSession session;

  // TODO: support Windows
  private static void validatePlatform() throws MojoExecutionException {
    if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
      throw new MojoExecutionException("CMakeBuilder does not yet support " +
          "the Windows platform.");
    }
  }

  /**
   * The test thread waits for the process to terminate.
   *
   * Since Process#waitFor doesn't take a timeout argument, we simulate one by
   * interrupting this thread after a certain amount of time has elapsed.
   */
  private static class TestThread extends Thread {
    private Process proc;
    private int retCode = -1;

    public TestThread(Process proc) {
      this.proc = proc;
    }

    public void run() {
      try {
        retCode = proc.waitFor();
      } catch (InterruptedException e) {
        retCode = -1;
      }
    }

    public int retCode() {
      return retCode;
    }
  }

  /**
   * Write to the status file.
   *
   * The status file will contain a string describing the exit status of the
   * test.  It will be SUCCESS if the test returned success (return code 0), a
   * numerical code if it returned a non-zero status, or IN_PROGRESS or
   * TIMED_OUT.
   */
  private void writeStatusFile(String status) throws IOException {
    FileOutputStream fos = new FileOutputStream(new File(results,
        testName + ".pstatus"));
    BufferedWriter out = null;
    try {
      out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
      out.write(status + "\n");
    } finally {
      // Closing the writer also closes the underlying stream; only close the
      // stream directly if the writer was never created.
      if (out != null) {
        out.close();
      } else {
        fos.close();
      }
    }
  }

  /**
   * Interpret a configuration string as a boolean.  Null, the empty string,
   * and the words false/no/off/disable (case-insensitive) are false;
   * everything else is true.
   */
  private static boolean isTruthy(String str) {
    if (str == null)
      return false;
    if (str.equalsIgnoreCase(""))
      return false;
    if (str.equalsIgnoreCase("false"))
      return false;
    if (str.equalsIgnoreCase("no"))
      return false;
    if (str.equalsIgnoreCase("off"))
      return false;
    if (str.equalsIgnoreCase("disable"))
      return false;
    return true;
  }

  final static private String VALID_PRECONDITION_TYPES_STR =
      "Valid precondition types are \"and\", \"andNot\"";

  /**
   * Validate the parameters that the user has passed.
   * @throws MojoExecutionException
   */
  private void validateParameters() throws MojoExecutionException {
    if (!(expectedResult.equals("success") ||
        expectedResult.equals("failure") ||
        expectedResult.equals("any"))) {
      throw new MojoExecutionException("expectedResult must be either " +
          "success, failure, or any");
    }
  }

  /**
   * Decide whether this test should run, honoring -DskipTests, a missing
   * binary, an explicit -Dtest= list, and the configured preconditions.
   *
   * @return true if the test should be executed.
   * @throws MojoExecutionException if the binary is missing (and
   *         skipIfMissing is false), or a precondition type is invalid.
   */
  private boolean shouldRunTest() throws MojoExecutionException {
    // Were we told to skip all tests?
    String skipTests = session.
        getExecutionProperties().getProperty("skipTests");
    if (isTruthy(skipTests)) {
      getLog().info("skipTests is in effect for test " + testName);
      return false;
    }
    // Does the binary exist?  If not, we shouldn't try to run it.
    if (!binary.exists()) {
      if (skipIfMissing) {
        getLog().info("Skipping missing test " + testName);
        return false;
      } else {
        throw new MojoExecutionException("Test " + binary +
            " was not built!  (File does not exist.)");
      }
    }
    // If there is an explicit list of tests to run, it should include this
    // test.
    String testProp = session.
        getExecutionProperties().getProperty("test");
    if (testProp != null) {
      String testPropArr[] = testProp.split(",");
      boolean found = false;
      for (String test : testPropArr) {
        if (test.equals(ALL_NATIVE)) {
          found = true;
          break;
        }
        if (test.equals(testName)) {
          found = true;
          break;
        }
      }
      if (!found) {
        getLog().debug("did not find test '" + testName + "' in "
            + "list " + testProp);
        return false;
      }
    }
    // Are all the preconditions satisfied?
    if (preconditions != null) {
      int idx = 1;
      for (Map.Entry<String, String> entry : preconditions.entrySet()) {
        String key = entry.getKey();
        String val = entry.getValue();
        if (key == null) {
          throw new MojoExecutionException("NULL is not a valid " +
              "precondition type.  " + VALID_PRECONDITION_TYPES_STR);
        } else if (key.equals("and")) {
          if (!isTruthy(val)) {
            getLog().info("Skipping test " + testName +
                " because precondition number " + idx + " was not met.");
            return false;
          }
        } else if (key.equals("andNot")) {
          if (isTruthy(val)) {
            getLog().info("Skipping test " + testName +
                " because negative precondition number " + idx +
                " was met.");
            return false;
          }
        } else {
          throw new MojoExecutionException(key + " is not a valid " +
              "precondition type.  " + VALID_PRECONDITION_TYPES_STR);
        }
        idx++;
      }
    }
    // OK, we should run this.
    return true;
  }

  /**
   * Run the native test binary, enforce the timeout, record a status file,
   * and compare the outcome against expectedResult.
   *
   * @throws MojoExecutionException if the test outcome does not match
   *         expectedResult, or an I/O error occurs.
   */
  public void execute() throws MojoExecutionException {
    if (testName == null) {
      testName = binary.getName();
    }
    validatePlatform();
    validateParameters();
    if (!shouldRunTest()) {
      return;
    }
    if (!results.isDirectory()) {
      if (!results.mkdirs()) {
        throw new MojoExecutionException("Failed to create " +
            "output directory '" + results + "'!");
      }
    }
    List<String> cmd = new LinkedList<String>();
    cmd.add(binary.getAbsolutePath());

    getLog().info("-------------------------------------------------------");
    getLog().info(" C M A K E B U I L D E R    T E S T");
    getLog().info("-------------------------------------------------------");
    StringBuilder bld = new StringBuilder();
    bld.append(testName).append(": running ");
    bld.append(binary.getAbsolutePath());
    for (String entry : args) {
      cmd.add(entry);
      bld.append(" ").append(entry);
    }
    getLog().info(bld.toString());
    ProcessBuilder pb = new ProcessBuilder(cmd);
    Exec.addEnvironment(pb, env);
    pb.redirectError(new File(results, testName + ".stderr"));
    pb.redirectOutput(new File(results, testName + ".stdout"));
    getLog().info("with extra environment variables " + Exec.envToString(env));
    Process proc = null;
    TestThread testThread = null;
    int retCode = -1;
    String status = "IN_PROGRESS";
    try {
      writeStatusFile(status);
    } catch (IOException e) {
      throw new MojoExecutionException("Error writing the status file", e);
    }
    long start = System.nanoTime();
    try {
      proc = pb.start();
      testThread = new TestThread(proc);
      testThread.start();
      // Use a long multiplication so a large timeout cannot overflow int.
      testThread.join(1000L * timeout);
      if (!testThread.isAlive()) {
        retCode = testThread.retCode();
        testThread = null;
        proc = null;
      }
    } catch (IOException e) {
      throw new MojoExecutionException("IOException while executing the test " +
          testName, e);
    } catch (InterruptedException e) {
      throw new MojoExecutionException("Interrupted while executing " +
          "the test " + testName, e);
    } finally {
      if (testThread != null) {
        // If the test thread didn't exit yet, that means the timeout expired.
        testThread.interrupt();
        try {
          testThread.join();
        } catch (InterruptedException e) {
          getLog().error("Interrupted while waiting for testThread", e);
        }
        // Use TIMED_OUT (underscore) so it matches both the check below and
        // the status values documented on writeStatusFile.
        status = "TIMED_OUT";
      } else if (retCode == 0) {
        status = "SUCCESS";
      } else {
        status = "ERROR CODE " + String.valueOf(retCode);
      }
      try {
        writeStatusFile(status);
      } catch (Exception e) {
        getLog().error("failed to write status file!", e);
      }
      if (proc != null) {
        proc.destroy();
      }
    }
    long end = System.nanoTime();
    getLog().info("STATUS: " + status + " after " +
        TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
        " millisecond(s).");
    getLog().info("-------------------------------------------------------");
    if (status.equals("TIMED_OUT")) {
      if (expectedResult.equals("success")) {
        throw new MojoExecutionException("Test " + binary +
            " timed out after " + timeout + " seconds!");
      }
    } else if (!status.equals("SUCCESS")) {
      if (expectedResult.equals("success")) {
        throw new MojoExecutionException("Test " + binary +
            " returned " + status);
      }
    } else if (expectedResult.equals("failure")) {
      throw new MojoExecutionException("Test " + binary +
          " succeeded, but we expected failure!");
    }
  }
}

View File

@ -22,6 +22,7 @@
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* Exec is a helper class for executing an external process from a mojo. * Exec is a helper class for executing an external process from a mojo.
@ -93,7 +94,7 @@ public int run(List<String> command, List<String> output,
* OutputBufferThread is a background thread for consuming and storing output * OutputBufferThread is a background thread for consuming and storing output
* of the external process. * of the external process.
*/ */
private static class OutputBufferThread extends Thread { public static class OutputBufferThread extends Thread {
private List<String> output; private List<String> output;
private BufferedReader reader; private BufferedReader reader;
@ -134,4 +135,42 @@ public List<String> getOutput() {
return output; return output;
} }
} }
/**
 * Add environment variables to a ProcessBuilder.
 *
 * A null map is treated as "nothing to add"; a null value is stored as the
 * empty string, since process environments cannot hold nulls.
 */
public static void addEnvironment(ProcessBuilder pb,
      Map<String, String> env) {
  if (env == null) {
    return;
  }
  Map<String, String> target = pb.environment();
  for (Map.Entry<String, String> e : env.entrySet()) {
    String value = e.getValue();
    target.put(e.getKey(), value == null ? "" : value);
  }
}
/**
 * Pretty-print the environment to a String.
 *
 * Renders as "{}" for a null or empty map; otherwise each entry appears on
 * its own line as "key = 'value'", with null values shown as ''.
 */
public static String envToString(Map<String, String> env) {
  StringBuilder sb = new StringBuilder("{");
  if (env != null) {
    for (Map.Entry<String, String> e : env.entrySet()) {
      String value = e.getValue() == null ? "" : e.getValue();
      sb.append("\n ").append(e.getKey())
          .append(" = '").append(value).append("'\n");
    }
  }
  return sb.append("}").toString();
}
} }

View File

@ -43,72 +43,24 @@
<build> <build>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>hadoop-maven-plugins</artifactId>
<executions> <executions>
<execution> <execution>
<id>make</id> <id>cmake-compile</id>
<phase>compile</phase> <phase>compile</phase>
<goals><goal>run</goal></goals> <goals><goal>cmake-compile</goal></goals>
<configuration> <configuration>
<target> <source>${basedir}/src</source>
<mkdir dir="${project.build.directory}/native"/> <vars>
<exec executable="cmake" dir="${project.build.directory}/native" <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
failonerror="true"> </vars>
<arg line="${basedir}/src/ -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
<!-- The second make is a workaround for HADOOP-9215. It can
be removed when version 2.6 of cmake is no longer supported . -->
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
</target>
</configuration> </configuration>
</execution> </execution>
<!-- TODO wire here native testcases
<execution>
<id>test</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</execution>
-->
</executions> </executions>
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
</profile> </profile>
</profiles> </profiles>
<!--
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<phase>generate-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${basedir}/../target/native"/>
<copy toDir="${basedir}/../target/native">
<fileset dir="${basedir}/src/main/native"/>
</copy>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
-->
</project> </project>

View File

@ -172,43 +172,32 @@
<build> <build>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>hadoop-maven-plugins</artifactId>
<configuration>
<skipTests>false</skipTests>
</configuration>
<executions> <executions>
<execution> <execution>
<id>make</id> <id>cmake-compile</id>
<phase>compile</phase> <phase>compile</phase>
<goals><goal>run</goal></goals> <goals><goal>cmake-compile</goal></goals>
<configuration> <configuration>
<target> <source>${basedir}/src</source>
<mkdir dir="${project.build.directory}/native/target"/> <vars>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true"> <HADOOP_CONF_DIR>${container-executor.conf.dir}</HADOOP_CONF_DIR>
<arg line="${basedir}/src/ -DHADOOP_CONF_DIR=${container-executor.conf.dir} -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/> <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
<env key="CFLAGS" value="${container-executor.additional_cflags}"/> </vars>
</exec> <env>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"> <CFLAGS>${container-executor.additional_cflags}</CFLAGS>
<arg line="VERBOSE=1"/> </env>
</exec>
<!-- The second make is a workaround for HADOOP-9215. It can
be removed when version 2.6 of cmake is no longer supported . -->
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
</target>
</configuration> </configuration>
</execution> </execution>
<execution> <execution>
<id>native_tests</id> <id>test-container-executor</id>
<goals><goal>cmake-test</goal></goals>
<phase>test</phase> <phase>test</phase>
<configuration> <configuration>
<target> <binary>${project.build.directory}/native/target/usr/local/bin/test-container-executor</binary>
<exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native"> <timeout>300</timeout>
<arg value="-c"/> <results>${project.build.directory}/native-results</results>
<arg value="[ x$SKIPTESTS = xtrue ] || test-container-executor"/>
<env key="SKIPTESTS" value="${skipTests}"/>
</exec>
</target>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>

View File

@ -470,6 +470,13 @@ void run_test_in_child(const char* test_name, void (*func)()) {
} }
void test_signal_container() { void test_signal_container() {
sigset_t set;
// unblock SIGQUIT
sigemptyset(&set);
sigaddset(&set, SIGQUIT);
sigprocmask(SIG_UNBLOCK, &set, NULL);
printf("\nTesting signal_container\n"); printf("\nTesting signal_container\n");
fflush(stdout); fflush(stdout);
fflush(stderr); fflush(stderr);