diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
index 5c3d77db4cd..69c52a986a7 100644
--- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt
+++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -109,6 +109,7 @@ add_executable(test_bulk_crc32
 )
 set_property(SOURCE main.cpp PROPERTY INCLUDE_DIRECTORIES "\"-Werror\" \"-Wall\"")
 
+SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
 add_dual_library(hadoop
     ${D}/io/compress/lz4/Lz4Compressor.c
     ${D}/io/compress/lz4/Lz4Decompressor.c
@@ -125,6 +126,17 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+
+IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
+    #
+    # By embedding '$ORIGIN' into the RPATH of libhadoop.so,
+    # dlopen will look in the directory containing libhadoop.so.
+    # However, $ORIGIN is not supported by all operating systems.
+    #
+    SET_TARGET_PROPERTIES(hadoop
+        PROPERTIES INSTALL_RPATH "\$ORIGIN/")
+ENDIF()
+
 target_link_dual_libraries(hadoop
     dl
     ${JAVA_JVM_LIBRARY}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
index 54a96bbe00a..4d82fd272b5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.util;
 
-import java.io.PrintWriter;
-import java.io.StringWriter;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -104,10 +101,7 @@ public final class ExitUtil {
    * @throws ExitException if System.exit is disabled for test purposes
    */
   public static void terminate(int status, Throwable t) throws ExitException {
-    StringWriter sw = new StringWriter();
-    t.printStackTrace(new PrintWriter(sw));
-    terminate(status, "Fatal exception with message " + t.getMessage() +
-        "\nstack trace\n" + sw.toString());
+    terminate(status, StringUtils.stringifyException(t));
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
index 06af8a9f1f1..f11d2a85392 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
@@ -31,9 +31,7 @@ import java.io.File;
 import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.PrintWriter;
 import java.io.RandomAccessFile;
-import java.io.StringWriter;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1238,10 +1236,8 @@ public class TestEditLog {
       }
     } catch (IOException e) {
     } catch (Throwable t) {
-      StringWriter sw = new StringWriter();
-      t.printStackTrace(new PrintWriter(sw));
-      fail("caught non-IOException throwable with message " +
-          t.getMessage() + "\nstack trace\n" + sw.toString());
+      fail("Caught non-IOException throwable " +
+          StringUtils.stringifyException(t));
     }
   } finally {
     if ((elfos != null) && (elfos.isOpen()))