HADOOP-12036. Consolidate all of the cmake extensions in one directory (alanburlison via cmccabe)

commit aa07dea357 (parent 60b858bfa6)

CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11885. hadoop-dist dist-layout-stitching.sh does not work with dash.
     (wang)
 
+    HADOOP-12036. Consolidate all of the cmake extensions in one directory
+    (alanburlison via cmccabe)
+
   BUG FIXES
 
     HADOOP-11802: DomainSocketWatcher thread terminates sometimes after there
HadoopCommon.cmake (new file)
@@ -0,0 +1,207 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Common CMake utilities and configuration, shared by all Native components.
#

#
# Platform-specific prerequisite checks.
#

if(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Only 64-bit Java is supported.
  if(NOT JVM_ARCH_DATA_MODEL EQUAL 64)
    message(FATAL_ERROR "Unrecognised JVM_ARCH_DATA_MODEL '${JVM_ARCH_DATA_MODEL}'. "
      "A 64-bit JVM must be used on Solaris, make sure that one is installed and, "
      "if necessary, the MAVEN_OPTS environment variable includes '-d64'")
  endif()

  # Only gcc is supported for now.
  if(NOT(CMAKE_COMPILER_IS_GNUCC AND CMAKE_COMPILER_IS_GNUCXX))
    message(FATAL_ERROR "Only gcc is supported on Solaris")
  endif()
endif()

#
# Helper functions and macros.
#

# Add flags to all the CMake compiler variables.
macro(hadoop_add_compiler_flags FLAGS)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAGS}")
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAGS}")
endmacro()

# Add flags to all the CMake linker variables.
macro(hadoop_add_linker_flags FLAGS)
  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAGS}")
  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAGS}")
  set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} ${FLAGS}")
endmacro()

# Compile a library with both shared and static variants.
function(hadoop_add_dual_library LIBNAME)
  add_library(${LIBNAME} SHARED ${ARGN})
  add_library(${LIBNAME}_static STATIC ${ARGN})
  set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
endfunction()

# Link both a static and a dynamic target against some libraries.
function(hadoop_target_link_dual_libraries LIBNAME)
  target_link_libraries(${LIBNAME} ${ARGN})
  target_link_libraries(${LIBNAME}_static ${ARGN})
endfunction()

# Set all the output directories to the same place.
function(hadoop_output_directory TGT DIR)
  set_target_properties(${TGT} PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
  set_target_properties(${TGT} PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction()

# Set the target directories for dynamic and static builds.
function(hadoop_dual_output_directory TGT DIR)
  hadoop_output_directory(${TGT} "${DIR}")
  hadoop_output_directory(${TGT}_static "${DIR}")
endfunction()

# Alter the behavior of find_package and find_library so that we find only
# shared libraries with a given version suffix.  You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
# afterwards.  On Windows this function is a no-op; Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_version LVERS)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
    # FreeBSD always has .so installed.
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # Windows doesn't support finding shared libraries by version.
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
  endif()
endmacro()

# Alter the behavior of find_package and find_library so that we find only
# shared libraries without any version suffix.  You should save
# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
# afterwards.  On Windows this function is a no-op; Windows does not encode
# version number information into library path names.
macro(hadoop_set_find_shared_library_without_version)
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # Mac OS uses .dylib
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
  elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
    # No effect
  else()
    # Most UNIX variants use .so
    set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
  endif()
endmacro()

#
# Configuration.
#

# Initialise the shared gcc/g++ flags if they aren't already defined.
if(NOT DEFINED GCC_SHARED_FLAGS)
  set(GCC_SHARED_FLAGS "-g -O2 -Wall -pthread -D_FILE_OFFSET_BITS=64")
endif()

# Add support for other compilers here, if necessary;
# the assumption is that GCC or a GCC-compatible compiler is being used.

# Set the shared GCC-compatible compiler and linker flags.
hadoop_add_compiler_flags("${GCC_SHARED_FLAGS}")
hadoop_add_linker_flags("${LINKER_SHARED_FLAGS}")

#
# Linux-specific configuration.
#
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
  # Make GNU extensions available.
  hadoop_add_compiler_flags("-D_GNU_SOURCE")

  # If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
  if(JVM_ARCH_DATA_MODEL EQUAL 32)
    # Force 32-bit code generation on amd64/x86_64, ppc64, sparc64
    if(CMAKE_COMPILER_IS_GNUCC AND CMAKE_SYSTEM_PROCESSOR MATCHES ".*64")
      hadoop_add_compiler_flags("-m32")
      hadoop_add_linker_flags("-m32")
    endif()
    # Set CMAKE_SYSTEM_PROCESSOR to ensure that find_package(JNI) will use 32-bit libraries
    if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
      set(CMAKE_SYSTEM_PROCESSOR "i686")
    endif()
  endif()

  # Determine float ABI of JVM on ARM.
  if(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
    find_program(READELF readelf)
    if(READELF MATCHES "NOTFOUND")
      message(WARNING "readelf not found; JVM float ABI detection disabled")
    else(READELF MATCHES "NOTFOUND")
      execute_process(
        COMMAND ${READELF} -A ${JAVA_JVM_LIBRARY}
        OUTPUT_VARIABLE JVM_ELF_ARCH
        ERROR_QUIET)
      if(NOT JVM_ELF_ARCH MATCHES "Tag_ABI_VFP_args: VFP registers")
        # Test compilation with -mfloat-abi=softfp using an arbitrary libc function
        # (typically fails with "fatal error: bits/predefs.h: No such file or directory"
        # if soft-float dev libraries are not installed)
        message("Soft-float JVM detected")
        include(CMakePushCheckState)
        cmake_push_check_state()
        set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mfloat-abi=softfp")
        include(CheckSymbolExists)
        check_symbol_exists(exit stdlib.h SOFTFP_AVAILABLE)
        if(NOT SOFTFP_AVAILABLE)
          message(FATAL_ERROR "Soft-float dev libraries required (e.g. 'apt-get install libc6-dev-armel' on Debian/Ubuntu)")
        endif()
        cmake_pop_check_state()
        hadoop_add_compiler_flags("-mfloat-abi=softfp")
      endif()
    endif()
  endif()

#
# Solaris-specific configuration.
#
elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
  # Solaris flags. 64-bit compilation is mandatory, and is checked earlier.
  hadoop_add_compiler_flags("-m64 -D__EXTENSIONS__ -D_POSIX_PTHREAD_SEMANTICS -D_XOPEN_SOURCE=500")
  hadoop_add_linker_flags("-m64")

  # CMAKE_SYSTEM_PROCESSOR is set to the output of 'uname -p', which on Solaris is
  # the 'lowest' ISA supported, i.e. 'i386' or 'sparc'. However in order for the
  # standard CMake modules to look in the right places it needs to reflect the required
  # compilation mode, i.e. 64 bit. We therefore force it to either 'amd64' or 'sparcv9'.
  if(CMAKE_SYSTEM_PROCESSOR STREQUAL "i386")
    set(CMAKE_SYSTEM_PROCESSOR "amd64")
    set(CMAKE_LIBRARY_ARCHITECTURE "amd64")
  elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "sparc")
    set(CMAKE_SYSTEM_PROCESSOR "sparcv9")
    set(CMAKE_LIBRARY_ARCHITECTURE "sparcv9")
  else()
    message(FATAL_ERROR "Unrecognised CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR}")
  endif()
endif()
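For orientation, a minimal sketch of how a component's build file might consume the helpers above once HadoopCommon.cmake is on the module path. The myclient target and mysource.c file are hypothetical stand-ins; the rewritten hadoop-common CMakeLists.txt further down shows the real usage.

# Hypothetical consumer of HadoopCommon.cmake (illustrative only).
cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/..)
include(HadoopCommon)

# Find only libz.so.1, then restore the default suffix list.
set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
hadoop_set_find_shared_library_version("1")
find_package(ZLIB REQUIRED)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})

# Build shared and static variants of the same sources, link both variants,
# and put both outputs in one directory.
hadoop_add_dual_library(myclient mysource.c)
hadoop_target_link_dual_libraries(myclient ${ZLIB_LIBRARIES})
hadoop_dual_output_directory(myclient target/usr/local/lib)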
HadoopJNI.cmake (new file)
@@ -0,0 +1,97 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Common JNI detection for CMake, shared by all Native components.
#

# Check that the JVM_ARCH_DATA_MODEL variable has been set to 32 or 64 by maven.
if(NOT DEFINED JVM_ARCH_DATA_MODEL)
  message(FATAL_ERROR "JVM_ARCH_DATA_MODEL is not defined")
elseif(NOT (JVM_ARCH_DATA_MODEL EQUAL 32 OR JVM_ARCH_DATA_MODEL EQUAL 64))
  message(FATAL_ERROR "JVM_ARCH_DATA_MODEL is not 32 or 64")
endif()

#
# Linux-specific JNI configuration.
#
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
  # Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
  # Since we were invoked from Maven, we know that the JAVA_HOME environment
  # variable is valid.  So we ignore system paths here and just use JAVA_HOME.
  file(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _java_home)
  if(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
    set(_java_libarch "i386")
  elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
    set(_java_libarch "amd64")
  elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
    set(_java_libarch "arm")
  elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64le")
    if(EXISTS "${_java_home}/jre/lib/ppc64le")
      set(_java_libarch "ppc64le")
    else()
      set(_java_libarch "ppc64")
    endif()
  else()
    set(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
  endif()
  set(_JDK_DIRS "${_java_home}/jre/lib/${_java_libarch}/*"
                "${_java_home}/jre/lib/${_java_libarch}"
                "${_java_home}/jre/lib/*"
                "${_java_home}/jre/lib"
                "${_java_home}/lib/*"
                "${_java_home}/lib"
                "${_java_home}/include/*"
                "${_java_home}/include"
                "${_java_home}"
  )
  find_path(JAVA_INCLUDE_PATH
    NAMES jni.h
    PATHS ${_JDK_DIRS}
    NO_DEFAULT_PATH)
  # In IBM java, it's jniport.h instead of jni_md.h
  find_path(JAVA_INCLUDE_PATH2
    NAMES jni_md.h jniport.h
    PATHS ${_JDK_DIRS}
    NO_DEFAULT_PATH)
  set(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
  find_library(JAVA_JVM_LIBRARY
    NAMES jvm JavaVM
    PATHS ${_JDK_DIRS}
    NO_DEFAULT_PATH)
  set(JNI_LIBRARIES ${JAVA_JVM_LIBRARY})
  unset(_java_libarch)
  unset(_java_home)

  message("JAVA_HOME=${JAVA_HOME}, JAVA_JVM_LIBRARY=${JAVA_JVM_LIBRARY}")
  message("JAVA_INCLUDE_PATH=${JAVA_INCLUDE_PATH}, JAVA_INCLUDE_PATH2=${JAVA_INCLUDE_PATH2}")
  if(JAVA_JVM_LIBRARY AND JAVA_INCLUDE_PATH AND JAVA_INCLUDE_PATH2)
    message("Located all JNI components successfully.")
  else()
    message(FATAL_ERROR "Failed to find a viable JVM installation under JAVA_HOME.")
  endif()

  # Use the standard FindJNI module to locate the JNI components.
  find_package(JNI REQUIRED)

#
# Otherwise, use the standard FindJNI module to locate the JNI components.
#
else()
  find_package(JNI REQUIRED)
endif()
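Similarly, a hedged sketch of consuming the variables this module exports (JNI_INCLUDE_DIRS and JNI_LIBRARIES). The mynative target and mynative.c source are hypothetical; the sketch assumes JAVA_HOME, JVM_ARCH_DATA_MODEL and GENERATED_JAVAH were passed in by Maven, as they are for the real build below.

# Hypothetical JNI consumer (illustrative only); HadoopJNI.cmake must be on CMAKE_MODULE_PATH.
include(HadoopJNI)                                 # sets JNI_INCLUDE_DIRS and JNI_LIBRARIES
include_directories(${GENERATED_JAVAH} ${JNI_INCLUDE_DIRS})
add_library(mynative SHARED mynative.c)            # JNI shared library
target_link_libraries(mynative ${JNI_LIBRARIES})   # link against libjvm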
hadoop-common src/CMakeLists.txt
@@ -16,209 +16,149 @@
 # limitations under the License.
 #
 
+#
+# CMake configuration.
+#
+
 cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
 
-# Default to release builds
-set(CMAKE_BUILD_TYPE, Release)
-
-include(JNIFlags.cmake NO_POLICY_SCOPE)
-
-# Compile a library with both shared and static variants
-function(add_dual_library LIBNAME)
-    add_library(${LIBNAME} SHARED ${ARGN})
-    add_library(${LIBNAME}_static STATIC ${ARGN})
-    set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
-endfunction(add_dual_library)
-
-# Link both a static and a dynamic target against some libraries
-function(target_link_dual_libraries LIBNAME)
-    target_link_libraries(${LIBNAME} ${ARGN})
-    target_link_libraries(${LIBNAME}_static ${ARGN})
-endfunction(target_link_dual_libraries)
-
-function(output_directory TGT DIR)
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-endfunction(output_directory TGT DIR)
-
-function(dual_output_directory TGT DIR)
-    output_directory(${TGT} "${DIR}")
-    output_directory(${TGT}_static "${DIR}")
-endfunction(dual_output_directory TGT DIR)
+list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/..)
+include(HadoopCommon)
+
+# Source and test locations.
+set(SRC main/native/src/org/apache/hadoop)
+set(TST main/native/src/test/org/apache/hadoop)
 
 #
-# This macro alters the behavior of find_package and find_library.
-# It does this by setting the CMAKE_FIND_LIBRARY_SUFFIXES global variable.
-# You should save that variable before calling this function and restore it
-# after you have accomplished your goal.
+# Main configuration.
 #
-# The behavior is altered in two ways:
-# 1. We always find shared libraries, never static;
-# 2. We find shared libraries with the given version number.
-#
-# On Windows this function is a no-op.  Windows does not encode
-# version number information into library path names.
-#
-macro(set_find_shared_library_version LVERS)
-    IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
-        # Mac OS uses .dylib
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
-        # FreeBSD has always .so installed.
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
-        # Windows doesn't support finding shared libraries by version.
-    ELSE()
-        # Most UNIX variants use .so
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
-    ENDIF()
-endmacro(set_find_shared_library_version LVERS)
-
-#
-# Alter the behavior of find_package and find_library so that we find only
-# shared libraries without any version suffix.  You should save
-# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
-# afterwards.
-#
-macro(set_find_shared_library_without_version)
-    IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
-        # Mac OS uses .dylib
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
-        # No effect
-    ELSE()
-        # Most UNIX variants use .so
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
-    ENDIF()
-endmacro(set_find_shared_library_without_version)
-
-if (NOT GENERATED_JAVAH)
-    # Must identify where the generated headers have been placed
-    MESSAGE(FATAL_ERROR "You must set the cmake variable GENERATED_JAVAH")
-endif (NOT GENERATED_JAVAH)
-find_package(JNI REQUIRED)
-
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
+
+# The caller must specify where the generated headers have been placed.
+if(NOT GENERATED_JAVAH)
+    message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH")
+endif()
+
+# Configure JNI.
+include(HadoopJNI)
+
+# Require zlib.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
 find_package(ZLIB REQUIRED)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_GNU_SOURCE")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64")
-set(D main/native/src/org/apache/hadoop)
-set(T main/native/src/test/org/apache/hadoop)
-
-GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
-
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+get_filename_component(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
+
+# Look for bzip2.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
 find_package(BZip2 QUIET)
-if (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
-    GET_FILENAME_COMPONENT(HADOOP_BZIP2_LIBRARY ${BZIP2_LIBRARIES} NAME)
+if(BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
+    get_filename_component(HADOOP_BZIP2_LIBRARY ${BZIP2_LIBRARIES} NAME)
     set(BZIP2_SOURCE_FILES
-        "${D}/io/compress/bzip2/Bzip2Compressor.c"
-        "${D}/io/compress/bzip2/Bzip2Decompressor.c")
-else (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
+        "${SRC}/io/compress/bzip2/Bzip2Compressor.c"
+        "${SRC}/io/compress/bzip2/Bzip2Decompressor.c")
+    set(REQUIRE_BZIP2 ${REQUIRE_BZIP2}) # Stop warning about unused variable.
+else()
     set(BZIP2_SOURCE_FILES "")
     set(BZIP2_INCLUDE_DIR "")
-    IF(REQUIRE_BZIP2)
-        MESSAGE(FATAL_ERROR "Required bzip2 library and/or header files could not be found.")
-    ENDIF(REQUIRE_BZIP2)
-endif (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-
-INCLUDE(CheckFunctionExists)
-INCLUDE(CheckCSourceCompiles)
-INCLUDE(CheckLibraryExists)
-CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
-CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
-CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
-
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
+    if(REQUIRE_BZIP2)
+        message(FATAL_ERROR "Required bzip2 library and/or header files could not be found.")
+    endif()
+endif()
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+
+# Require snappy.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
 find_library(SNAPPY_LIBRARY
     NAMES snappy
     PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
           ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
 find_path(SNAPPY_INCLUDE_DIR
     NAMES snappy.h
     PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
           ${CUSTOM_SNAPPY_INCLUDE})
-if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
-    GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
+if(SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+    get_filename_component(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
     set(SNAPPY_SOURCE_FILES
-        "${D}/io/compress/snappy/SnappyCompressor.c"
-        "${D}/io/compress/snappy/SnappyDecompressor.c")
-else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+        "${SRC}/io/compress/snappy/SnappyCompressor.c"
+        "${SRC}/io/compress/snappy/SnappyDecompressor.c")
+    set(REQUIRE_SNAPPY ${REQUIRE_SNAPPY}) # Stop warning about unused variable.
+    message(STATUS "Found Snappy: ${SNAPPY_LIBRARY}")
+else()
     set(SNAPPY_INCLUDE_DIR "")
     set(SNAPPY_SOURCE_FILES "")
-    IF(REQUIRE_SNAPPY)
-        MESSAGE(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
-    ENDIF(REQUIRE_SNAPPY)
-endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
-
-IF (CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
-    set(BULK_CRC_ARCH_SOURCE_FIlE "${D}/util/bulk_crc32_x86.c")
-ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
-    set(BULK_CRC_ARCH_SOURCE_FIlE "${D}/util/bulk_crc32_aarch64.c")
-ELSE()
-    MESSAGE("No HW CRC acceleration for ${CMAKE_SYSTEM_PROCESSOR}, falling back to SW")
-ENDIF()
-
-# Find the no-suffix version of libcrypto.
-# See HADOOP-11216 for details.
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_without_version()
-SET(OPENSSL_NAME "crypto")
-IF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+    if(REQUIRE_SNAPPY)
+        message(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
+    endif()
+endif()
+
+# Build hardware CRC32 acceleration, if supported on the platform.
+if(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+    set(BULK_CRC_ARCH_SOURCE_FIlE "${SRC}/util/bulk_crc32_x86.c")
+elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
+    set(BULK_CRC_ARCH_SOURCE_FIlE "${SRC}/util/bulk_crc32_aarch64.c")
+else()
+    message("No HW CRC acceleration for ${CMAKE_SYSTEM_PROCESSOR}, falling back to SW")
+endif()
+
+# Find the no-suffix version of libcrypto/openssl. See HADOOP-11216 for details.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_without_version()
+set(OPENSSL_NAME "crypto")
+if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
     SET(OPENSSL_NAME "eay32")
-ENDIF()
-MESSAGE("CUSTOM_OPENSSL_PREFIX = ${CUSTOM_OPENSSL_PREFIX}")
+endif()
+message("CUSTOM_OPENSSL_PREFIX = ${CUSTOM_OPENSSL_PREFIX}")
 find_library(OPENSSL_LIBRARY
     NAMES ${OPENSSL_NAME}
     PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/lib
           ${CUSTOM_OPENSSL_PREFIX}/lib64 ${CUSTOM_OPENSSL_LIB} NO_DEFAULT_PATH)
 find_library(OPENSSL_LIBRARY NAMES ${OPENSSL_NAME})
 find_path(OPENSSL_INCLUDE_DIR
     NAMES openssl/evp.h
     PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/include
           ${CUSTOM_OPENSSL_INCLUDE} NO_DEFAULT_PATH)
 find_path(OPENSSL_INCLUDE_DIR NAMES openssl/evp.h)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-SET(USABLE_OPENSSL 0)
-if (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
-    INCLUDE(CheckCSourceCompiles)
-    SET(OLD_CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES})
-    SET(CMAKE_REQUIRED_INCLUDES ${OPENSSL_INCLUDE_DIR})
-    CHECK_C_SOURCE_COMPILES("#include \"${OPENSSL_INCLUDE_DIR}/openssl/evp.h\"\nint main(int argc, char **argv) { return !EVP_aes_256_ctr; }" HAS_NEW_ENOUGH_OPENSSL)
-    SET(CMAKE_REQUIRED_INCLUDES ${OLD_CMAKE_REQUIRED_INCLUDES})
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+set(USABLE_OPENSSL 0)
+if(OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+    include(CheckCSourceCompiles)
+    set(OLD_CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES})
+    set(CMAKE_REQUIRED_INCLUDES ${OPENSSL_INCLUDE_DIR})
+    check_c_source_compiles("#include \"${OPENSSL_INCLUDE_DIR}/openssl/evp.h\"\nint main(int argc, char **argv) { return !EVP_aes_256_ctr; }" HAS_NEW_ENOUGH_OPENSSL)
+    set(CMAKE_REQUIRED_INCLUDES ${OLD_CMAKE_REQUIRED_INCLUDES})
     if(NOT HAS_NEW_ENOUGH_OPENSSL)
-        MESSAGE("The OpenSSL library installed at ${OPENSSL_LIBRARY} is too old.  You need a version at least new enough to have EVP_aes_256_ctr.")
-    else(NOT HAS_NEW_ENOUGH_OPENSSL)
+        message("The OpenSSL library installed at ${OPENSSL_LIBRARY} is too old.  You need a version at least new enough to have EVP_aes_256_ctr.")
+    else()
         SET(USABLE_OPENSSL 1)
-    endif(NOT HAS_NEW_ENOUGH_OPENSSL)
-endif (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
-if (USABLE_OPENSSL)
-    GET_FILENAME_COMPONENT(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
-    SET(OPENSSL_SOURCE_FILES
-        "${D}/crypto/OpensslCipher.c"
-        "${D}/crypto/random/OpensslSecureRandom.c")
-else (USABLE_OPENSSL)
-    MESSAGE("Cannot find a usable OpenSSL library. OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_LIB=${CUSTOM_OPENSSL_LIB}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
-    IF(REQUIRE_OPENSSL)
-        MESSAGE(FATAL_ERROR "Terminating build because require.openssl was specified.")
-    ENDIF(REQUIRE_OPENSSL)
-    SET(OPENSSL_LIBRARY "")
-    SET(OPENSSL_INCLUDE_DIR "")
-    SET(OPENSSL_SOURCE_FILES "")
-endif (USABLE_OPENSSL)
+    endif()
+endif()
+if(USABLE_OPENSSL)
+    get_filename_component(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
+    set(OPENSSL_SOURCE_FILES
+        "${SRC}/crypto/OpensslCipher.c"
+        "${SRC}/crypto/random/OpensslSecureRandom.c")
+    set(REQUIRE_OPENSSL ${REQUIRE_OPENSSL}) # Stop warning about unused variable.
+else()
+    message("Cannot find a usable OpenSSL library. OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_LIB=${CUSTOM_OPENSSL_LIB}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
+    if(REQUIRE_OPENSSL)
+        message(FATAL_ERROR "Terminating build because require.openssl was specified.")
+    endif()
+    set(OPENSSL_LIBRARY "")
+    set(OPENSSL_INCLUDE_DIR "")
+    set(OPENSSL_SOURCE_FILES "")
+endif()
 
+# Check for platform-specific functions and libraries.
+include(CheckFunctionExists)
+include(CheckLibraryExists)
+check_function_exists(sync_file_range HAVE_SYNC_FILE_RANGE)
+check_function_exists(posix_fadvise HAVE_POSIX_FADVISE)
+check_library_exists(dl dlopen "" NEED_LINK_DL)
+
+# Configure the build.
 include_directories(
     ${GENERATED_JAVAH}
     main/native/src
@@ -230,66 +170,60 @@ include_directories(
     ${BZIP2_INCLUDE_DIR}
     ${SNAPPY_INCLUDE_DIR}
     ${OPENSSL_INCLUDE_DIR}
-    ${D}/util
+    ${SRC}/util
 )
-CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
+configure_file(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
 
-add_executable(test_bulk_crc32
-    ${D}/util/bulk_crc32.c
-    ${BULK_CRC_ARCH_SOURCE_FIlE}
-    ${T}/util/test_bulk_crc32.c
-)
-
-SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
-add_dual_library(hadoop
+set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
+hadoop_add_dual_library(hadoop
     main/native/src/exception.c
-    ${D}/io/compress/lz4/Lz4Compressor.c
-    ${D}/io/compress/lz4/Lz4Decompressor.c
-    ${D}/io/compress/lz4/lz4.c
-    ${D}/io/compress/lz4/lz4hc.c
+    ${SRC}/io/compress/lz4/Lz4Compressor.c
+    ${SRC}/io/compress/lz4/Lz4Decompressor.c
+    ${SRC}/io/compress/lz4/lz4.c
+    ${SRC}/io/compress/lz4/lz4hc.c
     ${SNAPPY_SOURCE_FILES}
     ${OPENSSL_SOURCE_FILES}
-    ${D}/io/compress/zlib/ZlibCompressor.c
-    ${D}/io/compress/zlib/ZlibDecompressor.c
+    ${SRC}/io/compress/zlib/ZlibCompressor.c
+    ${SRC}/io/compress/zlib/ZlibDecompressor.c
     ${BZIP2_SOURCE_FILES}
-    ${D}/io/nativeio/NativeIO.c
-    ${D}/io/nativeio/errno_enum.c
-    ${D}/io/nativeio/file_descriptor.c
-    ${D}/io/nativeio/SharedFileDescriptorFactory.c
-    ${D}/net/unix/DomainSocket.c
-    ${D}/net/unix/DomainSocketWatcher.c
-    ${D}/security/JniBasedUnixGroupsMapping.c
-    ${D}/security/JniBasedUnixGroupsNetgroupMapping.c
-    ${D}/security/hadoop_group_info.c
-    ${D}/security/hadoop_user_info.c
-    ${D}/util/NativeCodeLoader.c
-    ${D}/util/NativeCrc32.c
-    ${D}/util/bulk_crc32.c
+    ${SRC}/io/nativeio/NativeIO.c
+    ${SRC}/io/nativeio/errno_enum.c
+    ${SRC}/io/nativeio/file_descriptor.c
+    ${SRC}/io/nativeio/SharedFileDescriptorFactory.c
+    ${SRC}/net/unix/DomainSocket.c
+    ${SRC}/net/unix/DomainSocketWatcher.c
+    ${SRC}/security/JniBasedUnixGroupsMapping.c
+    ${SRC}/security/JniBasedUnixGroupsNetgroupMapping.c
+    ${SRC}/security/hadoop_group_info.c
+    ${SRC}/security/hadoop_user_info.c
+    ${SRC}/util/NativeCodeLoader.c
+    ${SRC}/util/NativeCrc32.c
+    ${SRC}/util/bulk_crc32.c
     ${BULK_CRC_ARCH_SOURCE_FIlE}
 )
-if (NEED_LINK_DL)
+if(NEED_LINK_DL)
     set(LIB_DL dl)
-endif (NEED_LINK_DL)
+endif()
 
-IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
-    #
-    # By embedding '$ORIGIN' into the RPATH of libhadoop.so,
-    # dlopen will look in the directory containing libhadoop.so.
-    # However, $ORIGIN is not supported by all operating systems.
-    #
+hadoop_target_link_dual_libraries(hadoop ${LIB_DL} ${JAVA_JVM_LIBRARY})
+set(LIBHADOOP_VERSION "1.0.0")
+set_target_properties(hadoop PROPERTIES SOVERSION ${LIBHADOOP_VERSION})
+hadoop_dual_output_directory(hadoop target/usr/local/lib)
+
+# By embedding '$ORIGIN' into the RPATH of libhadoop.so, dlopen will look in
+# the directory containing libhadoop.so. However, $ORIGIN is not supported by
+# all operating systems.
+if(${CMAKE_SYSTEM_NAME} MATCHES "Linux|SunOS")
     set(RPATH "\$ORIGIN/")
-    if (EXTRA_LIBHADOOP_RPATH)
+    if(EXTRA_LIBHADOOP_RPATH)
         set(RPATH "${RPATH}:${EXTRA_LIBHADOOP_RPATH}/")
-    endif(EXTRA_LIBHADOOP_RPATH)
-    SET_TARGET_PROPERTIES(hadoop
-        PROPERTIES INSTALL_RPATH "${RPATH}")
-ENDIF()
+    endif()
+    set_target_properties(hadoop PROPERTIES INSTALL_RPATH "${RPATH}")
+endif()
 
-target_link_dual_libraries(hadoop
-    ${LIB_DL}
-    ${JAVA_JVM_LIBRARY}
+# Build the CRC32 test executable.
+add_executable(test_bulk_crc32
+    ${SRC}/util/bulk_crc32.c
+    ${BULK_CRC_ARCH_SOURCE_FIlE}
+    ${TST}/util/test_bulk_crc32.c
 )
-SET(LIBHADOOP_VERSION "1.0.0")
-SET_TARGET_PROPERTIES(hadoop PROPERTIES
-    SOVERSION ${LIBHADOOP_VERSION})
-dual_output_directory(hadoop target/usr/local/lib)
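The REQUIRE_* and CUSTOM_*_PREFIX variables referenced throughout are expected to arrive on the CMake command line; the Hadoop build passes them down from Maven. As an illustration only, an equivalent initial-cache script (loadable with cmake -C) might look like the following; every value shown is a hypothetical placeholder.

# Hypothetical initial-cache script; the variable names come from the files above, the values are illustrative.
set(GENERATED_JAVAH "/path/to/generated/javah" CACHE PATH "Directory containing javah-generated JNI headers")
set(JVM_ARCH_DATA_MODEL 64 CACHE STRING "32 or 64, normally supplied by Maven")
set(REQUIRE_SNAPPY ON CACHE BOOL "Fail the build if snappy cannot be found")
set(CUSTOM_SNAPPY_PREFIX "/opt/snappy" CACHE PATH "Non-default snappy install prefix")
set(REQUIRE_OPENSSL ON CACHE BOOL "Fail the build if a usable OpenSSL cannot be found")
set(CUSTOM_OPENSSL_PREFIX "/opt/openssl" CACHE PATH "Non-default OpenSSL install prefix")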