HDFS-9325. libhdfs++ Allow the location of hadoop source tree resources to be passed to CMake during a build. Contributed by Bob Hansen.
parent 5dc2da1e6f
commit 7d8452040d
@@ -147,7 +147,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                     <mkdir dir="${project.build.directory}/native"/>
                     <exec executable="cmake" dir="${project.build.directory}/native"
                           failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} -A '${env.PLATFORM}'"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} -A '${env.PLATFORM}'"/>
                       <arg line="${native_cmake_args}"/>
                     </exec>
                     <exec executable="msbuild" dir="${project.build.directory}/native"
@@ -212,7 +212,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                 <target>
                   <mkdir dir="${project.build.directory}"/>
                   <exec executable="cmake" dir="${project.build.directory}" failonerror="true">
-                    <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
+                    <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DHADOOP_BUILD=1 -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
                     <arg line="${native_cmake_args}"/>
                   </exec>
                   <exec executable="make" dir="${project.build.directory}" failonerror="true">
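
Both the Windows and Unix cmake invocations above now pass -DHADOOP_BUILD=1, so the libhdfs++ CMake scripts can tell they are running inside the Hadoop source tree; a standalone build simply omits the flag. A minimal sketch of how such a cache variable is consumed (only the HADOOP_BUILD name comes from this patch, the message text is illustrative):

    # cmake <src-dir> -DHADOOP_BUILD=1   -> in-tree build (what Maven now runs)
    # cmake <src-dir>                    -> standalone build
    if(HADOOP_BUILD)
      message(STATUS "In-tree build: importing headers/.proto from the Hadoop tree")
    else()
      message(STATUS "Standalone build: using files already imported into extern/")
    endif()
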
@@ -18,6 +18,8 @@
 project (libhdfspp)
 
 cmake_minimum_required(VERSION 2.8)
 
+enable_testing()
+include (CTest)
 
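
The newly added enable_testing()/include(CTest) pair turns on CTest integration at the top level, so the add_test() calls further down the tree are registered with ctest. In miniature (the target name here is illustrative):

    enable_testing()
    add_executable(smoke_test smoke_test.cc)   # illustrative target
    add_test(NAME smoke COMMAND smoke_test)    # now runnable via `ctest`
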
@@ -59,10 +61,53 @@ add_custom_target(doc ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/doc/Doxy
     COMMENT "Generating API documentation with Doxygen" VERBATIM)
 endif(DOXYGEN_FOUND)
 
+# Copy files from the hadoop tree into the output/extern directory if
+# they've changed
+function (copy_on_demand input_src_glob input_dest_dir)
+  get_filename_component(src_glob ${input_src_glob} REALPATH)
+  get_filename_component(dest_dir ${input_dest_dir} REALPATH)
+  get_filename_component(src_dir ${src_glob} DIRECTORY)
+  message(STATUS "Syncing ${src_glob} to ${dest_dir}")
+
+  file(GLOB_RECURSE src_files ${src_glob})
+  foreach(src_path ${src_files})
+    file(RELATIVE_PATH relative_src ${src_dir} ${src_path})
+    set(dest_path "${dest_dir}/${relative_src}")
+    add_custom_command(TARGET copy_hadoop_files
+      COMMAND ${CMAKE_COMMAND} -E copy_if_different "${src_path}" "${dest_path}"
+    )
+  endforeach()
+endfunction()
+
+# If we're building in the hadoop tree, pull the Hadoop files that
+# libhdfspp depends on.  This allows us to ensure that
+# the distribution will have a consistent set of headers and
+# .proto files
+if(HADOOP_BUILD)
+  set(HADOOP_IMPORT_DIR ${PROJECT_BINARY_DIR}/extern)
+  get_filename_component(HADOOP_IMPORT_DIR ${HADOOP_IMPORT_DIR} REALPATH)
+
+  add_custom_target(copy_hadoop_files ALL)
+
+  # Gather the Hadoop files and resources that libhdfs++ needs to build
+  copy_on_demand(../libhdfs/include/*.h* ${HADOOP_IMPORT_DIR}/include)
+  copy_on_demand(${CMAKE_CURRENT_LIST_DIR}/../../../../../hadoop-hdfs-client/src/main/proto/*.proto ${HADOOP_IMPORT_DIR}/proto/hdfs)
+  copy_on_demand(${CMAKE_CURRENT_LIST_DIR}/../../../../../../hadoop-common-project/hadoop-common/src/main/proto/*.proto ${HADOOP_IMPORT_DIR}/proto/hadoop)
+  copy_on_demand(${CMAKE_CURRENT_LIST_DIR}/../../../../../../hadoop-common-project/hadoop-common/src/test/proto/*.proto ${HADOOP_IMPORT_DIR}/proto/hadoop_test)
+else(HADOOP_BUILD)
+  set(HADOOP_IMPORT_DIR ${CMAKE_CURRENT_LIST_DIR}/extern)
+endif(HADOOP_BUILD)
+
+# Paths to find the imported files
+set(PROTO_HDFS_DIR ${HADOOP_IMPORT_DIR}/proto/hdfs)
+set(PROTO_HADOOP_DIR ${HADOOP_IMPORT_DIR}/proto/hadoop)
+set(PROTO_HADOOP_TEST_DIR ${HADOOP_IMPORT_DIR}/proto/hadoop_test)
+
 include_directories(
   include
   lib
   ../libhdfs/include
+  ${HADOOP_IMPORT_DIR}/include
 )
 
 include_directories( SYSTEM
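
copy_on_demand expands its glob once, at configure time, and attaches one copy_if_different command per matched file to the copy_hadoop_files target, so each build re-copies only files whose contents actually changed. One caveat worth knowing: a .proto added to the Hadoop tree later is not seen until CMake re-runs. Newer CMake can automate that re-run, though the option is not available to this tree, which targets CMake 2.8 (shown for comparison only):

    # CONFIGURE_DEPENDS (CMake >= 3.12) re-checks the glob at build time:
    file(GLOB_RECURSE src_files CONFIGURE_DEPENDS ${src_glob})
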
@@ -75,9 +120,6 @@ include_directories( SYSTEM
     ${OPENSSL_INCLUDE_DIR}
 )
 
-set(PROTO_HDFS_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../../../hadoop-hdfs-client/src/main/proto)
-set(PROTO_HADOOP_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../../../../hadoop-common-project/hadoop-common/src/main/proto)
-set(PROTO_HADOOP_TEST_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../../../../hadoop-common-project/hadoop-common/src/test/proto)
 
 add_subdirectory(third_party/gmock-1.7.0)
 add_subdirectory(lib)
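
With PROTO_HDFS_DIR and friends now derived from HADOOP_IMPORT_DIR near the top of the file, the old definitions that reached directly across the source tree are dropped here; in-tree and standalone builds resolve the same variables to the synced copies under extern/. Those directories feed the protoc generation steps elsewhere in the tree, roughly like this sketch (illustrative use of the stock FindProtobuf helper and an assumed .proto name):

    protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS ${PROTO_HDFS_DIR}/datatransfer.proto)
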
@@ -88,20 +130,43 @@ add_subdirectory(tests)
 set(EMPTY_FILE_CC ${CMAKE_CURRENT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/empty.cc)
 file(WRITE ${EMPTY_FILE_CC} "")
 
-hadoop_add_dual_library(hdfspp ${EMPTY_FILE_CC})
+# Build the output libraries
 if(NEED_LINK_DL)
   set(LIB_DL dl)
 endif()
 
-SET(LIBHDFSPP_SUBLIBS bindings_c fs rpc reader proto common)
-IF(${CMAKE_SYSTEM_NAME} MATCHES "Linux")
-  # linking a shared library from static ones requires --whole-archive
-  SET(LIBHDFSPP_SUBLIBS -Wl,--whole-archive ${LIBHDFSPP_SUBLIBS} -Wl,--no-whole-archive)
-ENDIF(${CMAKE_SYSTEM_NAME} MATCHES "Linux")
-
-hadoop_target_link_dual_libraries(hdfspp
-  ${LIBHDFSPP_SUBLIBS}
-  ${LIB_DL}
-  ${PROTOBUF_LIBRARY}
-  ${OPENSSL_LIBRARIES}
-)
+set(LIBHDFSPP_ALL_OBJECTS $<TARGET_OBJECTS:bindings_c_obj> $<TARGET_OBJECTS:fs_obj> $<TARGET_OBJECTS:rpc_obj> $<TARGET_OBJECTS:reader_obj> $<TARGET_OBJECTS:proto_obj> $<TARGET_OBJECTS:connection_obj> $<TARGET_OBJECTS:common_obj>)
+if (HADOOP_BUILD)
+  hadoop_add_dual_library(hdfspp ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS})
+  hadoop_target_link_dual_libraries(hdfspp
+    ${LIB_DL}
+    ${PROTOBUF_LIBRARY}
+    ${OPENSSL_LIBRARIES}
+  )
+else (HADOOP_BUILD)
+  add_library(hdfspp_static STATIC ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS})
+  target_link_libraries(hdfspp_static
+    ${LIB_DL}
+    ${PROTOBUF_LIBRARY}
+    ${OPENSSL_LIBRARIES}
+  )
+  add_library(hdfspp SHARED ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS})
+  target_link_libraries(hdfspp
+    ${LIB_DL}
+    ${PROTOBUF_LIBRARY}
+    ${OPENSSL_LIBRARIES}
+  )
+endif (HADOOP_BUILD)
+set(LIBHDFSPP_VERSION "0.1.0")
+set_target_properties(hdfspp PROPERTIES
+  SOVERSION ${LIBHDFSPP_VERSION})
+
+# Set up make install targets
+# Can be installed to a particular location via "make DESTDIR=... install"
+file(GLOB_RECURSE LIBHDFSPP_HEADER_FILES "${CMAKE_CURRENT_LIST_DIR}/include/*.h*")
+file(GLOB_RECURSE LIBHDFS_HEADER_FILES "${HADOOP_IMPORT_DIR}/include/*.h*")
+install(FILES ${LIBHDFSPP_HEADER_FILES} DESTINATION /include/libhdfspp)
+install(FILES ${LIBHDFS_HEADER_FILES} DESTINATION /include/libhdfs)
+
+install(TARGETS hdfspp_static ARCHIVE DESTINATION /lib)
+install(TARGETS hdfspp LIBRARY DESTINATION /lib)
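
The library build is reorganized around CMake OBJECT libraries: every module is compiled exactly once into a *_obj collection, and LIBHDFSPP_ALL_OBJECTS splices those objects straight into the final archives, which replaces the old Linux-only --whole-archive link trick. In-tree builds keep using Hadoop's hadoop_add_dual_library helper; standalone builds produce hdfspp_static and a shared hdfspp with plain add_library. The pattern in miniature (illustrative names; a shared aggregate additionally needs position-independent objects):

    add_library(foo_obj OBJECT foo.cc)                 # compiled once
    add_library(foo $<TARGET_OBJECTS:foo_obj>)         # per-module static lib, no recompile
    add_library(agg_static STATIC $<TARGET_OBJECTS:foo_obj>)
    add_library(agg SHARED $<TARGET_OBJECTS:foo_obj>)  # set POSITION_INDEPENDENT_CODE ON for this

The install rules use absolute DESTINATIONs on purpose: running "make DESTDIR=/some/stage install" relocates the whole header-and-library tree under the staging directory.
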
@@ -16,5 +16,6 @@
 # under the License.
 
-add_library(bindings_c hdfs.cc)
-add_dependencies(bindings_c fs rpc reader proto common fs rpc reader proto common)
+add_library(bindings_c_obj OBJECT hdfs.cc)
+add_dependencies(bindings_c_obj fs rpc reader proto common fs rpc reader proto common)
+add_library(bindings_c $<TARGET_OBJECTS:bindings_c_obj>)
@@ -15,4 +15,10 @@
 # specific language governing permissions and limitations
 # under the License.
 
-add_library(common base64.cc status.cc sasl_digest_md5.cc hdfs_public_api.cc options.cc configuration.cc configuration_loader.cc hdfs_configuration.cc util.cc retry_policy.cc)
+if(NEED_LINK_DL)
+  set(LIB_DL dl)
+endif()
+
+add_library(common_obj OBJECT base64.cc status.cc sasl_digest_md5.cc hdfs_public_api.cc options.cc configuration.cc configuration_loader.cc hdfs_configuration.cc util.cc retry_policy.cc)
+add_library(common $<TARGET_OBJECTS:common_obj>)
+target_link_libraries(common ${LIB_DL})
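
common now carries its own libdl linkage, since the aggregate library no longer pulls sublibrary link flags in transitively. As an aside (a sketch, not what this patch does): CMake's built-in ${CMAKE_DL_LIBS} expands to dl only on platforms that need it, so it can stand in for a hand-rolled NEED_LINK_DL check:

    target_link_libraries(common ${CMAKE_DL_LIBS})
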
@@ -16,5 +16,6 @@
 # limitations under the License.
 #
 
-add_library(connection datanodeconnection.cc)
-add_dependencies(connection proto)
+add_library(connection_obj OBJECT datanodeconnection.cc)
+add_dependencies(connection_obj proto)
+add_library(connection $<TARGET_OBJECTS:connection_obj>)
@@ -16,5 +16,6 @@
 # limitations under the License.
 #
 
-add_library(fs filesystem.cc filehandle.cc bad_datanode_tracker.cc)
-add_dependencies(fs proto)
+add_library(fs_obj OBJECT filesystem.cc filehandle.cc bad_datanode_tracker.cc)
+add_dependencies(fs_obj proto)
+add_library(fs $<TARGET_OBJECTS:fs_obj>)
@@ -79,4 +79,8 @@ gen_hrpc(HRPC_SRCS
     ${PROTO_HDFS_DIR}/ClientNamenodeProtocol.proto
 )
 
-add_library(proto ${PROTO_SRCS} ${PROTO_HDRS} ${HRPC_SRCS})
+add_library(proto_obj OBJECT ${PROTO_SRCS} ${PROTO_HDRS} ${HRPC_SRCS})
+if(HADOOP_BUILD)
+  add_dependencies(proto_obj copy_hadoop_files)
+endif(HADOOP_BUILD)
+add_library(proto $<TARGET_OBJECTS:proto_obj>)
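
Because an OBJECT library only compiles, its ordering against generated inputs is expressed with add_dependencies: in-tree builds must finish syncing the .proto files (copy_hadoop_files) before proto_obj compiles. The same idiom in miniature (illustrative names):

    add_custom_target(gen_sources COMMAND ${CMAKE_COMMAND} -E touch generated.h)
    add_library(consumer_obj OBJECT consumer.cc)
    add_dependencies(consumer_obj gen_sources)   # compile only after gen_sources ran
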
@@ -16,5 +16,6 @@
 # limitations under the License.
 #
 
-add_library(reader block_reader.cc datatransfer.cc)
-add_dependencies(reader proto)
+add_library(reader_obj OBJECT block_reader.cc datatransfer.cc)
+add_dependencies(reader_obj proto)
+add_library(reader $<TARGET_OBJECTS:reader_obj>)
@@ -17,5 +17,6 @@
 #
 
 include_directories(${OPENSSL_INCLUDE_DIRS})
-add_library(rpc rpc_connection.cc rpc_engine.cc)
-add_dependencies(rpc proto)
+add_library(rpc_obj OBJECT rpc_connection.cc rpc_engine.cc)
+add_dependencies(rpc_obj proto)
+add_library(rpc $<TARGET_OBJECTS:rpc_obj>)
@@ -32,9 +32,8 @@ include_directories(
     ${LIBHDFS_SRC_DIR}
     ${OS_DIR}
 )
-add_library(hdfspp_test_shim_static STATIC hdfs_shim.c libhdfs_wrapper.c libhdfspp_wrapper.cc ${LIBHDFSPP_BINDING_C}/hdfs.cc)
-
-add_library(test_common OBJECT mock_connection.cc)
+add_library(test_common_obj OBJECT mock_connection.cc)
+add_library(test_common $<TARGET_OBJECTS:test_common_obj>)
 
 set(PROTOBUF_IMPORT_DIRS ${PROTO_HADOOP_TEST_DIR})
@@ -56,8 +55,8 @@ function(add_memcheck_test name binary)
 endfunction(add_memcheck_test)
 
 
-add_executable(remote_block_reader_test remote_block_reader_test.cc $<TARGET_OBJECTS:test_common>)
-target_link_libraries(remote_block_reader_test reader proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
+add_executable(remote_block_reader_test remote_block_reader_test.cc)
+target_link_libraries(remote_block_reader_test test_common reader proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
 add_memcheck_test(remote_block_reader remote_block_reader_test)
 
 add_executable(sasl_digest_md5_test sasl_digest_md5_test.cc)
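
test_common changes from an object list spliced into each test's sources to an ordinary library target that tests link against, which keeps the executables' source lists clean. The before/after in miniature (illustrative target t):

    # before: add_executable(t t.cc $<TARGET_OBJECTS:test_common>)
    add_executable(t t.cc)
    target_link_libraries(t test_common)
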
@@ -69,8 +68,8 @@ target_link_libraries(retry_policy_test common gmock_main ${CMAKE_THREAD_LIBS_IN
 add_memcheck_test(retry_policy retry_policy_test)
 
 include_directories(${CMAKE_CURRENT_BINARY_DIR})
-add_executable(rpc_engine_test rpc_engine_test.cc ${PROTO_TEST_SRCS} ${PROTO_TEST_HDRS} $<TARGET_OBJECTS:test_common>)
-target_link_libraries(rpc_engine_test rpc proto common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
+add_executable(rpc_engine_test rpc_engine_test.cc ${PROTO_TEST_SRCS} ${PROTO_TEST_HDRS})
+target_link_libraries(rpc_engine_test test_common rpc proto common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
 add_memcheck_test(rpc_engine rpc_engine_test)
 
 add_executable(bad_datanode_test bad_datanode_test.cc)
@@ -89,6 +88,11 @@ add_executable(hdfs_configuration_test hdfs_configuration_test.cc)
 target_link_libraries(hdfs_configuration_test common gmock_main ${CMAKE_THREAD_LIBS_INIT})
 add_test(hdfs_configuration hdfs_configuration_test)
 
+#This test requires a great deal of Hadoop Java infrastructure to run.
+if(HADOOP_BUILD)
+add_library(hdfspp_test_shim_static STATIC hdfs_shim.c libhdfs_wrapper.c libhdfspp_wrapper.cc ${LIBHDFSPP_BINDING_C}/hdfs.cc)
+
 build_libhdfs_test(libhdfs_threaded hdfspp_test_shim_static expect.c test_libhdfs_threaded.c ${OS_DIR}/thread.c)
 link_libhdfs_test(libhdfs_threaded hdfspp_test_shim_static fs reader rpc proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} native_mini_dfs ${JAVA_JVM_LIBRARY})
 add_libhdfs_test(libhdfs_threaded hdfspp_test_shim_static)
+endif(HADOOP_BUILD)
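
The libhdfs_threaded shim test needs the Java side of Hadoop to run (native_mini_dfs and a JVM), so the shim library and its test registration move inside if(HADOOP_BUILD); standalone ctest runs simply never see them. Gating test definition this way looks like (a sketch with an illustrative test name):

    if(HADOOP_BUILD)
      add_test(NAME needs_jvm COMMAND needs_jvm_test)   # illustrative; requires a JVM
    endif(HADOOP_BUILD)
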
@@ -17,4 +17,5 @@
 #
 
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-missing-field-initializers -Wno-unused-const-variable")
-add_library(gmock_main gmock-gtest-all.cc gmock_main.cc)
+add_library(gmock_main_obj OBJECT gmock-gtest-all.cc gmock_main.cc)
+add_library(gmock_main $<TARGET_OBJECTS:gmock_main_obj>)