From 1156fc41dc1939ddeaae3bbe5dc025555b82bbff Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Thu, 7 Jun 2012 17:58:19 +0000
Subject: [PATCH] svn merge -c -1347094 for reverting HADOOP-8368 again.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1347739 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |   2 -
 hadoop-common-project/hadoop-common/pom.xml   | 101 +++++--
 .../hadoop-common/src/CMakeLists.txt          | 131 ---------
 .../hadoop-common/src/config.h.cmake          |  10 -
 .../src/main/native/.autom4te.cfg             |  42 +++
 .../hadoop-common/src/main/native/Makefile.am |  66 +++++
 .../src/main/native/acinclude.m4              |  28 ++
 .../src/main/native/configure.ac              | 130 +++++++++
 .../src/main/native/lib/Makefile.am           |  47 +++
 .../hadoop/io/compress/lz4/Lz4Compressor.c    |   5 +-
 .../hadoop/io/compress/lz4/Lz4Decompressor.c  |   5 +-
 .../io/compress/snappy/SnappyCompressor.c     |  36 ++-
 .../io/compress/snappy/SnappyDecompressor.c   |  36 ++-
 .../org_apache_hadoop_io_compress_snappy.h    |  41 ++-
 .../hadoop/io/compress/zlib/Makefile.am       |  53 ++++
 .../hadoop/io/compress/zlib/ZlibCompressor.c  |  32 ++-
 .../io/compress/zlib/ZlibDecompressor.c       |  32 ++-
 .../zlib/org_apache_hadoop_io_compress_zlib.h |  39 ++-
 .../org/apache/hadoop/io/nativeio/NativeIO.c  |   4 +-
 .../src/org/apache/hadoop/util/NativeCrc32.c  |   4 +-
 .../src/main/native/src/org_apache_hadoop.h   |  17 +-
 hadoop-hdfs-project/hadoop-hdfs/pom.xml       |  74 ++++-
 .../hadoop-hdfs/src/CMakeLists.txt            | 123 --------
 .../hadoop-hdfs/src/config.h.cmake            |   6 -
 .../src/contrib/fuse-dfs/Makefile.am          |  27 ++
 .../src/contrib/fuse-dfs/acinclude.m4         | 270 ++++++++++++++++++
 .../src/contrib/fuse-dfs/configure.ac         |  82 ++++++
 .../hadoop-hdfs/src/contrib/fuse-dfs/pom.xml  | 164 +++++++++++
 .../src/contrib/fuse-dfs/src/CMakeLists.txt   |  73 -----
 .../src/contrib/fuse-dfs/src/Makefile.am      |  22 ++
 .../src/contrib/fuse-dfs/src/fuse_dfs.h       |   8 +-
 .../hadoop-hdfs/src/main/native/Makefile.am   |  42 +++
 .../hadoop-hdfs/src/main/native/configure.ac  | 125 ++++++++
 .../src/main/native/m4/apfunctions.m4         |  41 +++
 .../hadoop-hdfs/src/main/native/m4/apjava.m4  | 142 +++++++++
 .../src/main/native/m4/apsupport.m4           | 168 +++++++++++
 hadoop-hdfs-project/pom.xml                   |   1 +
 .../hadoop-yarn-server-nodemanager/pom.xml    |  64 +++--
 .../src/CMakeLists.txt                        |  66 -----
 .../src/config.h.cmake                        |   6 -
 .../native/container-executor/.autom4te.cfg   |  42 +++
 .../.deps/container-executor.Po               |   1 +
 .../native/container-executor/Makefile.am     |  32 +++
 .../native/container-executor/configure.ac    |  54 ++++
 .../native/container-executor/impl/main.c     |   5 +-
 45 files changed, 1989 insertions(+), 510 deletions(-)
 delete mode 100644 hadoop-common-project/hadoop-common/src/CMakeLists.txt
 delete mode 100644 hadoop-common-project/hadoop-common/src/config.h.cmake
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/Makefile.am
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/acinclude.m4
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/configure.ac
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am
 create mode 100644 hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
 delete mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
 delete mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/config.h.cmake
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/pom.xml
 delete mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4
 delete mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
 delete mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
 create mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.autom4te.cfg
 create mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
 create mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
 create mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index b4560575bba..09d84994c0f 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -28,8 +28,6 @@ Release 2.0.1-alpha - UNRELEASED
 
     HADOOP-8358. Config-related WARN for dfs.web.ugi can be avoided. (harsh)
 
-    HADOOP-8368. Use CMake rather than autotools to build native code (ccccabe via tucu)
-
   BUG FIXES
 
     HADOOP-8372.
NetUtils.normalizeHostName() incorrectly handles hostname diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index a9a003a84bb..ba798495545 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -527,10 +527,31 @@ /usr/local ${snappy.prefix}/lib ${snappy.prefix}/include - + + org.apache.maven.plugins + maven-antrun-plugin + + + compile + compile + + run + + + + + + + + + + + + + org.codehaus.mojo native-maven-plugin @@ -560,27 +581,73 @@ - org.apache.maven.plugins - maven-antrun-plugin + org.codehaus.mojo + make-maven-plugin - make + compile compile - run - - - - - - - - - - - - + + autoreconf + configure + make-install + + + + ${project.build.directory}/native + + -i + -f + + + + + + OS_NAME + ${os.name} + + + OS_ARCH + ${os.arch} + + + JVM_DATA_MODEL + ${sun.arch.data.model} + + + + CPPFLAGS=-I${snappy.include} + LDFLAGS=-L${snappy.lib} + + ${project.build.directory}/native + /usr/local + + + + + OS_NAME + ${os.name} + + + OS_ARCH + ${os.arch} + + + JVM_DATA_MODEL + ${sun.arch.data.model} + + + HADOOP_NATIVE_SRCDIR + ${project.build.directory}/native + + + + + ${project.build.directory}/native/target + + diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt deleted file mode 100644 index 127e2d9c20f..00000000000 --- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt +++ /dev/null @@ -1,131 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -cmake_minimum_required(VERSION 2.6 FATAL_ERROR) - -# Default to release builds -set(CMAKE_BUILD_TYPE, Release) - -# If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit. -# This variable is set by maven. 
-if (JVM_ARCH_DATA_MODEL EQUAL 32) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32") - set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32") -endif (JVM_ARCH_DATA_MODEL EQUAL 32) - -# Compile a library with both shared and static variants -function(add_dual_library LIBNAME) - add_library(${LIBNAME} SHARED ${ARGN}) - add_library(${LIBNAME}_static STATIC ${ARGN}) - set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME}) -endfunction(add_dual_library) - -# Link both a static and a dynamic target against some libraries -function(target_link_dual_libraries LIBNAME) - target_link_libraries(${LIBNAME} ${ARGN}) - target_link_libraries(${LIBNAME}_static ${ARGN}) -endfunction(target_link_dual_libraries) - -function(output_directory TGT DIR) - SET_TARGET_PROPERTIES(${TGT} PROPERTIES - RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}") - SET_TARGET_PROPERTIES(${TGT} PROPERTIES - ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}") - SET_TARGET_PROPERTIES(${TGT} PROPERTIES - LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}") -endfunction(output_directory TGT DIR) - -function(dual_output_directory TGT DIR) - output_directory(${TGT} "${DIR}") - output_directory(${TGT}_static "${DIR}") -endfunction(dual_output_directory TGT DIR) - -if (NOT GENERATED_JAVAH) - # Must identify where the generated headers have been placed - MESSAGE(FATAL_ERROR "You must set the cmake variable GENERATED_JAVAH") -endif (NOT GENERATED_JAVAH) -find_package(JNI REQUIRED) -find_package(ZLIB REQUIRED) - -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2") -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64") -set(D main/native/src/org/apache/hadoop) - -GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME) - -INCLUDE(CheckFunctionExists) -INCLUDE(CheckCSourceCompiles) -CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE) -CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE) - -find_library(SNAPPY_LIBRARY NAMES snappy PATHS) -find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS) -if (SNAPPY_LIBRARY) - GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME) - set(SNAPPY_SOURCE_FILES - "${D}/io/compress/snappy/SnappyCompressor.c" - "${D}/io/compress/snappy/SnappyDecompressor.c") -else (${SNAPPY_LIBRARY}) - set(SNAPPY_INCLUDE_DIR "") - set(SNAPPY_SOURCE_FILES "") -endif (SNAPPY_LIBRARY) - -include_directories( - ${GENERATED_JAVAH} - main/native/src - ${CMAKE_CURRENT_SOURCE_DIR} - ${CMAKE_CURRENT_SOURCE_DIR}/src - ${CMAKE_BINARY_DIR} - ${JNI_INCLUDE_DIRS} - ${ZLIB_INCLUDE_DIRS} - ${SNAPPY_INCLUDE_DIR} -) -CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h) - -add_dual_library(hadoop - ${D}/io/compress/lz4/Lz4Compressor.c - ${D}/io/compress/lz4/Lz4Decompressor.c - ${D}/io/compress/lz4/lz4.c - ${SNAPPY_SOURCE_FILES} - ${D}/io/compress/zlib/ZlibCompressor.c - ${D}/io/compress/zlib/ZlibDecompressor.c - ${D}/io/nativeio/NativeIO.c - ${D}/io/nativeio/errno_enum.c - ${D}/io/nativeio/file_descriptor.c - ${D}/security/JniBasedUnixGroupsMapping.c - ${D}/security/JniBasedUnixGroupsNetgroupMapping.c - ${D}/security/getGroup.c - ${D}/util/NativeCrc32.c - ${D}/util/bulk_crc32.c -) -target_link_dual_libraries(hadoop - dl - ${JAVA_JVM_LIBRARY} -) -SET(LIBHADOOP_VERSION "1.0.0") -SET_TARGET_PROPERTIES(hadoop PROPERTIES - SOVERSION ${LIBHADOOP_VERSION}) -dual_output_directory(hadoop target/usr/local/lib) - -if (HADOOP_RUNAS_HOME) - add_executable(runAs - test/system/c++/runAs/runAs.c - test/system/c++/runAs/main.c - ) - output_directory(runAs 
target/usr/local/bin) -endif (HADOOP_RUNAS_HOME) diff --git a/hadoop-common-project/hadoop-common/src/config.h.cmake b/hadoop-common-project/hadoop-common/src/config.h.cmake deleted file mode 100644 index 9098b68b87e..00000000000 --- a/hadoop-common-project/hadoop-common/src/config.h.cmake +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef CONFIG_H -#define CONFIG_H - -#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@" -#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@" -#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@" -#cmakedefine HAVE_SYNC_FILE_RANGE -#cmakedefine HAVE_POSIX_FADVISE - -#endif diff --git a/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg b/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg new file mode 100644 index 00000000000..a69c197883f --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# autom4te configuration for hadoop-native library +# + +begin-language: "Autoheader-preselections" +args: --no-cache +end-language: "Autoheader-preselections" + +begin-language: "Automake-preselections" +args: --no-cache +end-language: "Automake-preselections" + +begin-language: "Autoreconf-preselections" +args: --no-cache +end-language: "Autoreconf-preselections" + +begin-language: "Autoconf-without-aclocal-m4" +args: --no-cache +end-language: "Autoconf-without-aclocal-m4" + +begin-language: "Autoconf" +args: --no-cache +end-language: "Autoconf" + diff --git a/hadoop-common-project/hadoop-common/src/main/native/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/Makefile.am new file mode 100644 index 00000000000..c4ca564c2be --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/Makefile.am @@ -0,0 +1,66 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Notes: +# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os-arch}. +# 2. 
This makefile depends on the following environment variables to function correctly: +# * HADOOP_NATIVE_SRCDIR +# * JAVA_HOME +# * JVM_DATA_MODEL +# * OS_NAME +# * OS_ARCH +# All these are setup by build.xml. +# + +# Export $(PLATFORM) to prevent proliferation of sub-shells +export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z]) + +ACLOCAL_AMFLAGS = -I m4 +AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \ + -I$(HADOOP_NATIVE_SRCDIR)/javah +AM_LDFLAGS = @JNI_LDFLAGS@ +AM_CFLAGS = -g -Wall -fPIC -O2 +if SPECIFY_DATA_MODEL +AM_LDFLAGS += -m$(JVM_DATA_MODEL) +AM_CFLAGS += -m$(JVM_DATA_MODEL) +endif + +lib_LTLIBRARIES = libhadoop.la +libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \ + src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \ + src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c \ + src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c \ + src/org/apache/hadoop/io/compress/lz4/lz4.c \ + src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c \ + src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c \ + src/org/apache/hadoop/security/getGroup.c \ + src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \ + src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \ + src/org/apache/hadoop/io/nativeio/file_descriptor.c \ + src/org/apache/hadoop/io/nativeio/errno_enum.c \ + src/org/apache/hadoop/io/nativeio/NativeIO.c \ + src/org/apache/hadoop/util/NativeCrc32.c \ + src/org/apache/hadoop/util/bulk_crc32.c + +libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS) +libhadoop_la_LIBADD = -ldl -ljvm + +# +#vim: sw=4: ts=4: noet +# diff --git a/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4 b/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4 new file mode 100644 index 00000000000..93e05b8148d --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4 @@ -0,0 +1,28 @@ +# AC_COMPUTE_NEEDED_DSO(LIBRARY, TEST_PROGRAM, PREPROC_SYMBOL) +# -------------------------------------------------- +# Compute the 'actual' dynamic-library used +# for LIBRARY and set it to PREPROC_SYMBOL +AC_DEFUN([AC_COMPUTE_NEEDED_DSO], +[ +AC_CACHE_CHECK([Checking for the 'actual' dynamic-library for '-l$1'], ac_cv_libname_$1, + [ + echo '$2' > conftest.c + if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -l$1 2>&1`"; then + dnl Try objdump and ldd in that order to get the dynamic library + if test ! -z "`which objdump | grep -v 'no objdump'`"; then + ac_cv_libname_$1="`objdump -p conftest | grep NEEDED | grep $1 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`" + elif test ! -z "`which ldd | grep -v 'no ldd'`"; then + ac_cv_libname_$1="`ldd conftest | grep $1 | sed 's/^[[[^A-Za-z0-9]]]*\([[[A-Za-z0-9\.]]]*\)[[[^A-Za-z0-9]]]*=>.*$/\"\1\"/'`" + elif test ! 
-z "`which otool | grep -v 'no otool'`"; then + ac_cv_libname_$1=\"`otool -L conftest | grep $1 | sed -e 's/^[ ]*//' -e 's/ .*//' -e 's/.*\/\(.*\)$/\1/'`\"; + else + AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' or 'otool' to compute the dynamic library for '-l$1') + fi + else + ac_cv_libname_$1=libnotfound.so + fi + rm -f conftest* + ] +) +AC_DEFINE_UNQUOTED($3, ${ac_cv_libname_$1}, [The 'actual' dynamic-library for '-l$1']) +])# AC_COMPUTE_NEEDED_DSO diff --git a/hadoop-common-project/hadoop-common/src/main/native/configure.ac b/hadoop-common-project/hadoop-common/src/main/native/configure.ac new file mode 100644 index 00000000000..34408d64182 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/configure.ac @@ -0,0 +1,130 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# configure.ac for hadoop native code. +# + +# Notes: +# 1. This configure.ac depends on the following environment variables to function correctly: +# * HADOOP_NATIVE_SRCDIR +# * JAVA_HOME +# * JVM_DATA_MODEL +# * OS_NAME +# * OS_ARCH +# All these are setup by build.xml. + +# -*- Autoconf -*- +# Process this file with autoconf to produce a configure script. +# + +AC_PREREQ(2.59) +AC_INIT(src/org_apache_hadoop.h) +AC_CONFIG_SRCDIR([src/org_apache_hadoop.h]) +AC_CONFIG_AUX_DIR([config]) +AC_CONFIG_MACRO_DIR([m4]) +AC_CONFIG_HEADER([config.h]) +AC_SYS_LARGEFILE +AC_GNU_SOURCE + +AM_INIT_AUTOMAKE(hadoop,1.0.0) + +# Checks for programs. +AC_PROG_CC +AC_PROG_LIBTOOL + +# Checks for libraries. +dnl Check for '-ldl' +AC_CHECK_LIB([dl], [dlopen]) + +dnl Check for '-ljvm' +JNI_LDFLAGS="" +if test $JAVA_HOME != "" +then + JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server" + JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1` + JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`" +fi +LDFLAGS="$LDFLAGS $JNI_LDFLAGS" +AC_CHECK_LIB([jvm], [JNI_GetCreatedJavaVMs]) +AC_SUBST([JNI_LDFLAGS]) + +# Checks for header files. +dnl Check for Ansi C headers +AC_HEADER_STDC + +dnl Check for other standard C headers +AC_CHECK_HEADERS([stdio.h stddef.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.)) + +dnl Check for JNI headers +JNI_CPPFLAGS="" +if test $JAVA_HOME != "" +then + for dir in `find $JAVA_HOME/include -follow -type d` + do + JNI_CPPFLAGS="$JNI_CPPFLAGS -I$dir" + done +fi +cppflags_bak=$CPPFLAGS +CPPFLAGS="$CPPFLAGS $JNI_CPPFLAGS" +AC_CHECK_HEADERS([jni.h], [], AC_MSG_ERROR([Native java headers not found. 
Is \$JAVA_HOME set correctly?])) +CPPFLAGS=$cppflags_bak +AC_SUBST([JNI_CPPFLAGS]) + +dnl Check for zlib headers +AC_CHECK_HEADERS([zlib.h zconf.h], + AC_COMPUTE_NEEDED_DSO(z, + [#include "zlib.h" + int main(int argc, char **argv){zlibVersion();return 0;}], + HADOOP_ZLIB_LIBRARY), + AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.)) + +dnl Check for snappy headers +AC_CHECK_HEADERS([snappy-c.h], + AC_COMPUTE_NEEDED_DSO(snappy, + [#include "snappy-c.h" + int main(int argc, char **argv){snappy_compress(0,0,0,0);return 0;}], + HADOOP_SNAPPY_LIBRARY), + AC_MSG_WARN(Snappy headers were not found... building without snappy.)) + +dnl Check for headers needed by the native Group resolution implementation +AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.)) + +dnl check for posix_fadvise +AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(posix_fadvise)]) + +dnl check for sync_file_range +AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(sync_file_range)]) + +# Checks for typedefs, structures, and compiler characteristics. +AC_C_CONST + +# Checks for library functions. +AC_CHECK_FUNCS([memset]) + +# Check for nonstandard STRERROR_R +AC_FUNC_STRERROR_R + +AM_CONDITIONAL([SPECIFY_DATA_MODEL], [case $host_cpu in arm*) false;; *) true;; esac]) + +AC_CONFIG_FILES([Makefile]) +AC_OUTPUT + +# +#vim: sw=2: ts=2: noet +# diff --git a/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am new file mode 100644 index 00000000000..9b536ff440c --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am @@ -0,0 +1,47 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Makefile template for building libhadoop.so +# + +# +# Notes: +# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/lib +# 2. This makefile depends on the following environment variables to function correctly: +# * HADOOP_NATIVE_SRCDIR +# * JAVA_HOME +# * OS_ARCH +# All these are setup by build.xml and/or the top-level makefile. 
+# + +# Add .lo files in $(SUBDIRS) to construct libhadoop.so +HADOOP_OBJS = $(foreach path,$(addprefix ../,$(SUBDIRS)),$(wildcard $(path)/*.lo)) +AM_LDFLAGS = @JNI_LDFLAGS@ +if SPECIFY_DATA_MODEL +AM_LDFLAGS += -m$(JVM_DATA_MODEL) +endif + +lib_LTLIBRARIES = libhadoop.la +libhadoop_la_SOURCES = +libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS) +libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm + +# +#vim: sw=4: ts=4: noet +# diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c index 641ecd73b7a..d52a4f6b2a3 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c @@ -16,7 +16,10 @@ * limitations under the License. */ -#include "config.h" +#if defined HAVE_CONFIG_H + #include +#endif + #include "org_apache_hadoop.h" #include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c index 3eebc1859d8..547b027cc14 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c @@ -16,7 +16,10 @@ * limitations under the License. */ -#include "config.h" +#if defined HAVE_CONFIG_H + #include +#endif + #include "org_apache_hadoop.h" #include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c index 96a2402ae7a..13991c23f4f 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c @@ -16,12 +16,36 @@ * limitations under the License. 
*/ -#include -#include -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HADOOP_SNAPPY_LIBRARY + +#if defined HAVE_STDIO_H + #include +#else + #error 'stdio.h not found' +#endif + +#if defined HAVE_STDLIB_H + #include +#else + #error 'stdlib.h not found' +#endif + +#if defined HAVE_STRING_H + #include +#else + #error 'string.h not found' +#endif + +#if defined HAVE_DLFCN_H + #include +#else + #error 'dlfcn.h not found' +#endif -#include "config.h" #include "org_apache_hadoop_io_compress_snappy.h" #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h" @@ -99,3 +123,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso return (jint)compressed_direct_buf_len; } + +#endif //define HADOOP_SNAPPY_LIBRARY diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c index a5f07ca5566..767c5f4b313 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c @@ -16,12 +16,36 @@ * limitations under the License. */ -#include -#include -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HADOOP_SNAPPY_LIBRARY + +#if defined HAVE_STDIO_H + #include +#else + #error 'stdio.h not found' +#endif + +#if defined HAVE_STDLIB_H + #include +#else + #error 'stdlib.h not found' +#endif + +#if defined HAVE_STRING_H + #include +#else + #error 'string.h not found' +#endif + +#if defined HAVE_DLFCN_H + #include +#else + #error 'dlfcn.h not found' +#endif -#include "config.h" #include "org_apache_hadoop_io_compress_snappy.h" #include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h" @@ -103,3 +127,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres return (jint)uncompressed_direct_buf_len; } + +#endif //define HADOOP_SNAPPY_LIBRARY diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h index 3e99d5d20d2..815e0306736 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h @@ -17,13 +17,42 @@ */ -#ifndef ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H +#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H #define ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H -#include "org_apache_hadoop.h" -#include -#include -#include -#include + +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HADOOP_SNAPPY_LIBRARY + + #if defined HAVE_STDDEF_H + #include + #else + #error 'stddef.h not found' + #endif + + #if defined HAVE_SNAPPY_C_H + #include + #else + #error 'Please install snappy-development packages for your platform.' 
+ #endif + + #if defined HAVE_DLFCN_H + #include + #else + #error "dlfcn.h not found" + #endif + + #if defined HAVE_JNI_H + #include + #else + #error 'jni.h not found' + #endif + + #include "org_apache_hadoop.h" + +#endif //define HADOOP_SNAPPY_LIBRARY #endif //ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am new file mode 100644 index 00000000000..821f33f0527 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am @@ -0,0 +1,53 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Makefile template for building native 'zlib' for hadoop. +# + +# +# Notes: +# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/$(subdir) . +# 2. This makefile depends on the following environment variables to function correctly: +# * HADOOP_NATIVE_SRCDIR +# * JAVA_HOME +# * JVM_DATA_MODEL +# * OS_ARCH +# * PLATFORM +# All these are setup by build.xml and/or the top-level makefile. +# 3. The creation of requisite jni headers/stubs are also done by build.xml and they are +# assumed to be in $(HADOOP_PREFIX)/build/native/src/org/apache/hadoop/io/compress/zlib. +# + +# The 'vpath directive' to locate the actual source files +vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir) + +AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src +AM_LDFLAGS = @JNI_LDFLAGS@ +AM_CFLAGS = -g -Wall -fPIC -O2 +if SPECIFY_DATA_MODEL +AM_CFLAGS += -m$(JVM_DATA_MODEL) +endif + +noinst_LTLIBRARIES = libnativezlib.la +libnativezlib_la_SOURCES = ZlibCompressor.c ZlibDecompressor.c +libnativezlib_la_LIBADD = -ldl -ljvm + +# +#vim: sw=4: ts=4: noet +# diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c index 689c783ef7e..9ada3f03b05 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c @@ -16,12 +16,34 @@ * limitations under the License. 
*/ -#include -#include -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HAVE_STDIO_H + #include +#else + #error 'stdio.h not found' +#endif + +#if defined HAVE_STDLIB_H + #include +#else + #error 'stdlib.h not found' +#endif + +#if defined HAVE_STRING_H + #include +#else + #error 'string.h not found' +#endif + +#if defined HAVE_DLFCN_H + #include +#else + #error 'dlfcn.h not found' +#endif -#include "config.h" #include "org_apache_hadoop_io_compress_zlib.h" #include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c index 6abe36381f1..3047dba2672 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c @@ -16,12 +16,34 @@ * limitations under the License. */ -#include -#include -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HAVE_STDIO_H + #include +#else + #error 'stdio.h not found' +#endif + +#if defined HAVE_STDLIB_H + #include +#else + #error 'stdlib.h not found' +#endif + +#if defined HAVE_STRING_H + #include +#else + #error 'string.h not found' +#endif + +#if defined HAVE_DLFCN_H + #include +#else + #error 'dlfcn.h not found' +#endif -#include "config.h" #include "org_apache_hadoop_io_compress_zlib.h" #include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h index c53aa531c99..16b607b4a91 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h @@ -19,13 +19,40 @@ #if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H #define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H -#include -#include -#include -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif + +#if defined HAVE_STDDEF_H + #include +#else + #error 'stddef.h not found' +#endif + +#if defined HAVE_ZLIB_H + #include +#else + #error 'Please install zlib-development packages for your platform.' +#endif + +#if defined HAVE_ZCONF_H + #include +#else + #error 'Please install zlib-development packages for your platform.' +#endif + +#if defined HAVE_DLFCN_H + #include +#else + #error "dlfcn.h not found" +#endif + +#if defined HAVE_JNI_H + #include +#else + #error 'jni.h not found' +#endif -#include "config.h" #include "org_apache_hadoop.h" /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. 
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c index c08ea037d9f..fbcf9563ee4 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c @@ -16,6 +16,9 @@ * limitations under the License. */ +// get the autoconf settings +#include "config.h" + #include #include #include @@ -29,7 +32,6 @@ #include #include -#include "config.h" #include "org_apache_hadoop.h" #include "org_apache_hadoop_io_nativeio_NativeIO.h" #include "file_descriptor.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c index dd51c0a2578..869c2ba2e8e 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c @@ -16,6 +16,9 @@ * limitations under the License. */ +// get the autoconf settings +#include "config.h" + #include #include #include @@ -23,7 +26,6 @@ #include #include -#include "config.h" #include "org_apache_hadoop.h" #include "org_apache_hadoop_util_NativeCrc32.h" #include "gcc_optimizations.h" diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h b/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h index a50c41dbbb4..7a777c2f4f0 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h @@ -24,10 +24,21 @@ #if !defined ORG_APACHE_HADOOP_H #define ORG_APACHE_HADOOP_H -#include -#include +#if defined HAVE_CONFIG_H + #include +#endif -#include "config.h" +#if defined HAVE_DLFCN_H + #include +#else + #error "dlfcn.h not found" +#endif + +#if defined HAVE_JNI_H + #include +#else + #error 'jni.h not found' +#endif /* A helper macro to 'throw' a java exception. 
*/ #define THROW(env, exception_name, message) \ diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml index 60920adcb3b..238a738702c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml @@ -377,22 +377,76 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> maven-antrun-plugin - make + compile compile - run + + run + - - - - - - - + + + + + + + + org.codehaus.mojo + make-maven-plugin + + + compile + compile + + autoreconf + configure + make-install + + + + ${project.build.directory}/native + + -i + -f + + + + + + ac_cv_func_malloc_0_nonnull + yes + + + JVM_ARCH + ${sun.arch.data.model} + + + + + ${project.build.directory}/native + /usr/local + + + + + ac_cv_func_malloc_0_nonnull + yes + + + JVM_ARCH + ${sun.arch.data.model} + + + + + ${project.build.directory}/native/target + + + + + + 4.0.0 + + org.apache.hadoop + hadoop-project + 2.0.1-SNAPSHOT + ../../../../../hadoop-project + + org.apache.hadoop.contrib + hadoop-hdfs-fuse + 2.0.1-SNAPSHOT + pom + + Apache Hadoop HDFS Fuse + Apache Hadoop HDFS Fuse + + + + org.apache.hadoop + hadoop-hdfs + compile + + + org.apache.hadoop + hadoop-hdfs + test + test-jar + + + + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + 2.6 + + + org.apache.maven.plugins + maven-surefire-plugin + + 1 + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + javadoc + + site + + true + true + false + ${maven.compile.source} + ${maven.compile.encoding} + + + HttpFs API + * + + + + + + + + org.apache.maven.plugins + maven-project-info-reports-plugin + + + + false + + + dependencies + + site + + + + + org.apache.rat + apache-rat-plugin + + + + + + + + + + + fuse + + false + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + prepare-compile-native + generate-sources + + run + + + + + + + + + + + compile-fuse + compile + + run + + + + + + + + + + + + + + + + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt deleted file mode 100644 index fb3c580e94c..00000000000 --- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt +++ /dev/null @@ -1,73 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# Find Linux FUSE -IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - find_package(PkgConfig REQUIRED) - pkg_check_modules(FUSE fuse) - IF(FUSE_FOUND) - FLATTEN_LIST("${FUSE_CFLAGS}" " " FUSE_CFLAGS) - FLATTEN_LIST("${FUSE_LDFLAGS}" " " FUSE_LDFLAGS) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FUSE_CFLAGS}") - set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} ${FUSE_LDFLAGS}") - MESSAGE(STATUS "Building Linux FUSE client.") - include_directories(${FUSE_INCLUDE_DIRS}) - ELSE(FUSE_FOUND) - MESSAGE(STATUS "Failed to find Linux FUSE libraries or include files. Will not build FUSE client.") - ENDIF(FUSE_FOUND) -ELSE (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - MESSAGE(STATUS "Non-Linux system detected. Will not build FUSE client.") -ENDIF (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - -IF(FUSE_FOUND) - add_executable(fuse_dfs - fuse_dfs.c - fuse_options.c - fuse_connect.c - fuse_impls_access.c - fuse_impls_chmod.c - fuse_impls_chown.c - fuse_impls_create.c - fuse_impls_flush.c - fuse_impls_getattr.c - fuse_impls_mkdir.c - fuse_impls_mknod.c - fuse_impls_open.c - fuse_impls_read.c - fuse_impls_readdir.c - fuse_impls_release.c - fuse_impls_rename.c - fuse_impls_rmdir.c - fuse_impls_statfs.c - fuse_impls_symlink.c - fuse_impls_truncate.c - fuse_impls_unlink.c - fuse_impls_utimens.c - fuse_impls_write.c - fuse_init.c - fuse_stat_struct.c - fuse_trash.c - fuse_users.c - ) - target_link_libraries(fuse_dfs - ${FUSE_LIBRARIES} - ${JAVA_JVM_LIBRARY} - hdfs - m - ) -ENDIF(FUSE_FOUND) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am new file mode 100644 index 00000000000..706297f314e --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am @@ -0,0 +1,22 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +bin_PROGRAMS = fuse_dfs +fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c +AM_CFLAGS= -Wall -g +AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/../../src/main/native -I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include +AM_LDFLAGS= -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib64 -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib -L$(FUSE_HOME)/lib -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server +fuse_dfs_LDADD=-lfuse -lhdfs -ljvm -lm diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h index 4554dbdbea5..56ed9cb1738 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h +++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h @@ -31,9 +31,13 @@ #include #include -#include +#ifdef HAVE_CONFIG_H +#include +#endif -#include "config.h" +#ifdef HAVE_SETXATTR +#include +#endif // // Check if a path is in the mount option supplied protected paths. diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am new file mode 100644 index 00000000000..8bbd627315f --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +@PRODUCT_MK@ + +#AM_CPPFLAGS = -I$(top_srcdir) +ACLOCAL_AMFLAGS = -I m4 + +lib_LTLIBRARIES = libhdfs.la +libhdfs_la_SOURCES = hdfs.c hdfsJniHelper.c hdfs.h + +#check_PROGRAMS = hdfs_test hdfs_read hdfs_write +check_PROGRAMS = hdfs_test hdfs_read hdfs_write + +hdfs_test_SOURCES = hdfs_test.c hdfs.h +hdfs_test_LDADD = ${libdir}/libhdfs.la + +hdfs_read_SOURCES = hdfs_read.c +hdfs_read_LDADD = ${libdir}/libhdfs.la + +hdfs_write_SOURCES = hdfs_write.c +hdfs_write_LDADD = ${libdir}/libhdfs.la + +test: hdfs_test hdfs_read hdfs_write + ${LIBHDFS_SRC_DIR}/tests/test-libhdfs.sh + + +# vim: sw=4: ts=4: noet diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac new file mode 100644 index 00000000000..d801fc47385 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac @@ -0,0 +1,125 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Autoconf input file +# $Id$ + +AC_INIT([libhdfs], [0.1.0], omalley@apache.org) +AC_PREFIX_DEFAULT([`pwd`/../install]) +AC_CONFIG_AUX_DIR([config]) + +# Generates Makefile from Makefile.am. Modify when new subdirs are added. +# Change Makefile.am also to add subdirectly. 
+AM_INIT_AUTOMAKE(foreign no-dist) +AC_CONFIG_FILES(Makefile) + +LT_INIT + +AC_CONFIG_MACRO_DIR([m4]) +dnl ------------------------------------------------------------------------- +dnl Check current host (forget about cross compilation) and validate it +dnl against the cache (fail if the cache differs) +dnl ------------------------------------------------------------------------- +AP_MSG_HEADER([Current host]) +AC_CANONICAL_HOST() +AP_CANONICAL_HOST_CHECK() + +dnl ------------------------------------------------------------------------- +dnl Check C environment +dnl ------------------------------------------------------------------------- +AP_MSG_HEADER([C-Language compilation tools]) +AC_PROG_CC() +AC_CHECK_TOOL(RANLIB, ranlib, :) + +dnl ------------------------------------------------------------------------- +dnl Check if this host is supported +dnl ------------------------------------------------------------------------- +AP_MSG_HEADER([Host support]) +AP_SUPPORTED_HOST() +if test "$supported_os" = "darwin" +then + if test -z "$JAVA_HOME" -a -d /System/Library/Frameworks/JavaVM.framework/Home; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home + fi + + _prevdir=`/bin/pwd` + if test -n "$JAVA_HOME" -a -d "$JAVA_HOME/include"; then + cd "$JAVA_HOME/include" + elif test -n "$JAVA_HOME" -a -d "$JAVA_HOME/../Headers"; then + cd "$JAVA_HOME/../Headers" + else + cd /System/Library/Frameworks/JavaVM.framework/Headers + fi + CFLAGS="$CFLAGS -m${JVM_ARCH} -I`/bin/pwd -P`" + cd $_prevdir + unset _prevdir +fi + +dnl ------------------------------------------------------------------------- +dnl Check JAVA environment +dnl ------------------------------------------------------------------------- +AP_MSG_HEADER([Java compilation tools]) +AP_JAVA() +AP_SABLEVM() +AP_KAFFE() +AP_PROG_JAVAC() +AP_PROG_JAR() +AP_JVM_LIBDIR() +if test "$supported_os" != "darwin" +then + case $host_cpu in + arm*) ;; + *) + CFLAGS="$CFLAGS -m${JVM_ARCH}" + LDFLAGS="$LDFLAGS -m${JVM_ARCH}" + ;; + esac + AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH]) + CFLAGS="$CFLAGS -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os" + LDFLAGS="$LDFLAGS -L$LIB_JVM_DIR -ljvm -Wl,-x" +fi + +dnl ------------------------------------------------------------------------- +dnl Add gcc specific CFLAGS. +dnl ------------------------------------------------------------------------- +if test "$GCC" = "yes" +then + CFLAGS="$CFLAGS -Wall -Wstrict-prototypes" + AC_MSG_RESULT([gcc flags added]) +fi +dnl ------------------------------------------------------------------------- +dnl Add gcc specific CFLAGS. +dnl ------------------------------------------------------------------------- +if test -z "$LDCMD" +then + LDCMD="$CC" +fi +AC_SUBST(LDCMD) + + +AC_PROG_CC +AC_PROG_LIBTOOL + +AC_TYPE_SIZE_T +AC_CHECK_FUNCS([strdup strerror strtoul]) +AC_CHECK_HEADERS([fcntl.h]) +AC_C_CONST +AC_C_VOLATILE +#AC_FUNC_MALLOC +AC_HEADER_STDBOOL +AC_SUBST(PRODUCT_MK) +AC_OUTPUT diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4 new file mode 100644 index 00000000000..cb5938ffca5 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4 @@ -0,0 +1,41 @@ +dnl +dnl Licensed to the Apache Software Foundation (ASF) under one or more +dnl contributor license agreements. See the NOTICE file distributed with +dnl this work for additional information regarding copyright ownership. 
+dnl The ASF licenses this file to You under the Apache License, Version 2.0 +dnl (the "License"); you may not use this file except in compliance with +dnl the License. You may obtain a copy of the License at +dnl +dnl http://www.apache.org/licenses/LICENSE-2.0 +dnl +dnl Unless required by applicable law or agreed to in writing, software +dnl distributed under the License is distributed on an "AS IS" BASIS, +dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +dnl See the License for the specific language governing permissions and +dnl limitations under the License. +dnl + +dnl ------------------------------------------------------------------------- +dnl Author Pier Fumagalli +dnl Version $Id$ +dnl ------------------------------------------------------------------------- + +AC_DEFUN([AP_MSG_HEADER],[ + printf "*** %s ***\n" "$1" 1>&2 + AC_PROVIDE([$0]) +]) + +AC_DEFUN([AP_CANONICAL_HOST_CHECK],[ + AC_MSG_CHECKING([cached host system type]) + if { test x"${ac_cv_host_system_type+set}" = x"set" && + test x"$ac_cv_host_system_type" != x"$host" ; } + then + AC_MSG_RESULT([$ac_cv_host_system_type]) + AC_MSG_ERROR([remove the \"$cache_file\" file and re-run configure]) + else + AC_MSG_RESULT(ok) + ac_cv_host_system_type="$host" + fi + AC_PROVIDE([$0]) +]) + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4 new file mode 100644 index 00000000000..993fc5bed93 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4 @@ -0,0 +1,142 @@ +dnl +dnl Licensed to the Apache Software Foundation (ASF) under one or more +dnl contributor license agreements. See the NOTICE file distributed with +dnl this work for additional information regarding copyright ownership. +dnl The ASF licenses this file to You under the Apache License, Version 2.0 +dnl (the "License"); you may not use this file except in compliance with +dnl the License. You may obtain a copy of the License at +dnl +dnl http://www.apache.org/licenses/LICENSE-2.0 +dnl +dnl Unless required by applicable law or agreed to in writing, software +dnl distributed under the License is distributed on an "AS IS" BASIS, +dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +dnl See the License for the specific language governing permissions and +dnl limitations under the License. +dnl + +dnl ------------------------------------------------------------------------- +dnl Author Pier Fumagalli +dnl Version $Id$ +dnl ------------------------------------------------------------------------- + +AC_DEFUN([AP_PROG_JAVAC_WORKS],[ + AC_CACHE_CHECK([wether the Java compiler ($JAVAC) works],ap_cv_prog_javac_works,[ + echo "public class Test {}" > Test.java + $JAVAC $JAVACFLAGS Test.java > /dev/null 2>&1 + if test $? -eq 0 + then + rm -f Test.java Test.class + ap_cv_prog_javac_works=yes + else + rm -f Test.java Test.class + AC_MSG_RESULT(no) + AC_MSG_ERROR([installation or configuration problem: javac cannot compile]) + fi + ]) +]) + +dnl Check for JAVA compilers. 
+AC_DEFUN([AP_PROG_JAVAC],[ + if test "$SABLEVM" != "NONE" + then + AC_PATH_PROG(JAVACSABLE,javac-sablevm,NONE,$JAVA_HOME/bin) + else + JAVACSABLE="NONE" + fi + if test "$JAVACSABLE" = "NONE" + then + XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH" + AC_PATH_PROG(JAVAC,javac,NONE,$XPATH) + else + AC_PATH_PROG(JAVAC,javac-sablevm,NONE,$JAVA_HOME/bin) + fi + AC_MSG_RESULT([$JAVAC]) + if test "$JAVAC" = "NONE" + then + AC_MSG_ERROR([javac not found]) + fi + AP_PROG_JAVAC_WORKS() + AC_PROVIDE([$0]) + AC_SUBST(JAVAC) + AC_SUBST(JAVACFLAGS) +]) + +dnl Check for jar archivers. +AC_DEFUN([AP_PROG_JAR],[ + if test "$SABLEVM" != "NONE" + then + AC_PATH_PROG(JARSABLE,jar-sablevm,NONE,$JAVA_HOME/bin) + else + JARSABLE="NONE" + fi + if test "$JARSABLE" = "NONE" + then + XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH" + AC_PATH_PROG(JAR,jar,NONE,$XPATH) + else + AC_PATH_PROG(JAR,jar-sablevm,NONE,$JAVA_HOME/bin) + fi + if test "$JAR" = "NONE" + then + AC_MSG_ERROR([jar not found]) + fi + AC_PROVIDE([$0]) + AC_SUBST(JAR) +]) + +AC_DEFUN([AP_JAVA],[ + AC_ARG_WITH(java,[ --with-java=DIR Specify the location of your JDK installation],[ + AC_MSG_CHECKING([JAVA_HOME]) + if test -d "$withval" + then + JAVA_HOME="$withval" + AC_MSG_RESULT([$JAVA_HOME]) + else + AC_MSG_RESULT([failed]) + AC_MSG_ERROR([$withval is not a directory]) + fi + AC_SUBST(JAVA_HOME) + ]) + if test x"$JAVA_HOME" = x + then + AC_MSG_ERROR([Java Home not defined. Rerun with --with-java=[...] parameter]) + fi +]) + +dnl check if the JVM in JAVA_HOME is sableVM +dnl $JAVA_HOME/bin/sablevm and /opt/java/lib/sablevm/bin are tested. +AC_DEFUN([AP_SABLEVM],[ + if test x"$JAVA_HOME" != x + then + AC_PATH_PROG(SABLEVM,sablevm,NONE,$JAVA_HOME/bin) + if test "$SABLEVM" = "NONE" + then + dnl java may be SableVM. + if $JAVA_HOME/bin/java -version 2> /dev/null | grep SableVM > /dev/null + then + SABLEVM=$JAVA_HOME/bin/java + fi + fi + if test "$SABLEVM" != "NONE" + then + AC_MSG_RESULT([Using sableVM: $SABLEVM]) + CFLAGS="$CFLAGS -DHAVE_SABLEVM" + fi + fi +]) + +dnl check if the JVM in JAVA_HOME is kaffe +dnl $JAVA_HOME/bin/kaffe is tested. +AC_DEFUN([AP_KAFFE],[ + if test x"$JAVA_HOME" != x + then + AC_PATH_PROG(KAFFEVM,kaffe,NONE,$JAVA_HOME/bin) + if test "$KAFFEVM" != "NONE" + then + AC_MSG_RESULT([Using kaffe: $KAFFEVM]) + CFLAGS="$CFLAGS -DHAVE_KAFFEVM" + LDFLAGS="$LDFLAGS -Wl,-rpath $JAVA_HOME/jre/lib/$HOST_CPU -L $JAVA_HOME/jre/lib/$HOST_CPU -lkaffevm" + fi + fi +]) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4 new file mode 100644 index 00000000000..c3fb0e29d90 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4 @@ -0,0 +1,168 @@ +dnl +dnl Licensed to the Apache Software Foundation (ASF) under one or more +dnl contributor license agreements. See the NOTICE file distributed with +dnl this work for additional information regarding copyright ownership. +dnl The ASF licenses this file to You under the Apache License, Version 2.0 +dnl (the "License"); you may not use this file except in compliance with +dnl the License. You may obtain a copy of the License at +dnl +dnl http://www.apache.org/licenses/LICENSE-2.0 +dnl +dnl Unless required by applicable law or agreed to in writing, software +dnl distributed under the License is distributed on an "AS IS" BASIS, +dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+dnl See the License for the specific language governing permissions and
+dnl limitations under the License.
+dnl
+
+dnl -------------------------------------------------------------------------
+dnl Author Pier Fumagalli
+dnl Version $Id$
+dnl -------------------------------------------------------------------------
+
+AC_DEFUN([AP_SUPPORTED_HOST],[
+  AC_MSG_CHECKING([C flags dependent on host system type])
+
+  case $host_os in
+  darwin*)
+    CFLAGS="$CFLAGS -DOS_DARWIN -DDSO_DYLD"
+    supported_os="darwin"
+    ;;
+  solaris*)
+    CFLAGS="$CFLAGS -DOS_SOLARIS -DDSO_DLFCN"
+    supported_os="solaris"
+    LIBS="$LIBS -ldl -lthread"
+    ;;
+  linux*)
+    CFLAGS="$CFLAGS -DOS_LINUX -DDSO_DLFCN"
+    supported_os="linux"
+    LIBS="$LIBS -ldl -lpthread"
+    ;;
+  cygwin)
+    CFLAGS="$CFLAGS -DOS_CYGWIN -DDSO_DLFCN -DNO_SETSID"
+    supported_os="win32"
+    ;;
+  sysv)
+    CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN"
+    LIBS="$LIBS -ldl"
+    ;;
+  sysv4)
+    CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN -Kthread"
+    LDFLAGS="-Kthread $LDFLAGS"
+    LIBS="$LIBS -ldl"
+    ;;
+  freebsd*)
+    CFLAGS="$CFLAGS -DOS_FREEBSD -DDSO_DLFCN -D_THREAD_SAFE -pthread"
+    LDFLAGS="-pthread $LDFLAGS"
+    supported_os="freebsd"
+    ;;
+  osf5*)
+    CFLAGS="$CFLAGS -pthread -DOS_TRU64 -DDSO_DLFCN -D_XOPEN_SOURCE_EXTENDED"
+    LDFLAGS="$LDFLAGS -pthread"
+    ;;
+  hpux11*)
+    CFLAGS="$CFLAGS -pthread -DOS_HPUX -DDSO_DLFCN"
+    LDFLAGS="$LDFLAGS -pthread"
+    LIBS="$LIBS -lpthread"
+    ;;
+  *)
+    AC_MSG_RESULT([failed])
+    AC_MSG_ERROR([Unsupported operating system "$host_os"]);;
+  esac
+
+  case $host_cpu in
+  powerpc)
+    CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
+    HOST_CPU=$host_cpu;;
+  sparc*)
+    CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
+    HOST_CPU=$host_cpu;;
+  i?86)
+    CFLAGS="$CFLAGS -DCPU=\\\"i386\\\""
+    HOST_CPU=i386;;
+  x86_64)
+    CFLAGS="$CFLAGS -DCPU=\\\"amd64\\\""
+    HOST_CPU=amd64;;
+  bs2000)
+    CFLAGS="$CFLAGS -DCPU=\\\"osd\\\" -DCHARSET_EBCDIC -DOSD_POSIX"
+    supported_os="osd"
+    LDFLAGS="-Kno_link_stdlibs -B llm4"
+    LIBS="$LIBS -lBLSLIB"
+    LDCMD="/opt/C/bin/cc"
+    HOST_CPU=osd;;
+  mips)
+    CFLAGS="$CFLAGS -DCPU=\\\"mips\\\""
+    supported_os="mips"
+    HOST_CPU=mips;;
+  alpha*)
+    CFLAGS="$CFLAGS -DCPU=\\\"alpha\\\""
+    supported_os="alpha"
+    HOST_CPU=alpha;;
+  hppa2.0w)
+    CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0W\\\""
+    supported_os="hp-ux"
+    HOST_CPU=PA_RISC2.0W;;
+  hppa2.0)
+    CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0\\\""
+    supported_os="hp-ux"
+    HOST_CPU=PA_RISC2.0;;
+  mipsel)
+    CFLAGS="$CFLAGS -DCPU=\\\"mipsel\\\""
+    supported_os="mipsel"
+    HOST_CPU=mipsel;;
+  ia64)
+    CFLAGS="$CFLAGS -DCPU=\\\"ia64\\\""
+    supported_os="ia64"
+    HOST_CPU=ia64;;
+  s390)
+    CFLAGS="$CFLAGS -DCPU=\\\"s390\\\""
+    supported_os="s390"
+    HOST_CPU=s390;;
+  arm*)
+    CFLAGS="$CFLAGS -DCPU=\\\"arm\\\""
+    supported_os="arm"
+    HOST_CPU=arm;;
+  *)
+    AC_MSG_RESULT([failed])
+    AC_MSG_ERROR([Unsupported CPU architecture "$host_cpu"]);;
+  esac
+
+  AC_MSG_RESULT([ok])
+  AC_SUBST(CFLAGS)
+  AC_SUBST(LDFLAGS)
+])
+
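The triple-escaped quotes in the -DCPU flags above survive make and shell expansion so that the compiler ultimately sees an ordinary string literal such as -DCPU="amd64". A minimal C sketch of how such a define is consumed; the file name and fallback value are illustrative assumptions, not part of this patch:

/* cpu_define.c - sketch: consuming a string injected via -DCPU=\"...\" */
#include <stdio.h>

#ifndef CPU
#define CPU "unknown"  /* illustrative fallback when the build does not define CPU */
#endif

int main(void) {
    /* CPU expands to a quoted literal, e.g. "amd64", so it can be used
     * anywhere a const char * is expected. */
    const char *host_cpu = CPU;
    printf("compiled for CPU: %s\n", host_cpu);
    return 0;
}

Compiled with, say, cc -DCPU='"amd64"' cpu_define.c, this prints the value that AP_SUPPORTED_HOST selected.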
+AC_DEFUN([AP_JVM_LIBDIR],[
+  AC_MSG_CHECKING([location of the JVM library])
+  javabasedir=$JAVA_HOME
+  case $host_os in
+  cygwin* | mingw* | pw23* )
+    lib_jvm_dir=`find $javabasedir -follow \( \
+      \( -name client -type d -prune \) -o \
+      \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
+    ;;
+  aix*)
+    lib_jvm_dir=`find $javabasedir \( \
+      \( -name client -type d -prune \) -o \
+      \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
+    if test -z "$lib_jvm_dir"; then
+      lib_jvm_dir=`find $javabasedir \( \
+        \( -name client -type d -prune \) -o \
+        \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
+    fi
+    ;;
+  *)
+    lib_jvm_dir=`find $javabasedir -follow \( \
+      \( -name client -type d -prune \) -o \
+      \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
+    if test -z "$lib_jvm_dir"; then
+      lib_jvm_dir=`find $javabasedir -follow \( \
+        \( -name client -type d -prune \) -o \
+        \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
+    fi
+    ;;
+  esac
+  LIB_JVM_DIR=$lib_jvm_dir
+  AC_MSG_RESULT([$LIB_JVM_DIR])
+  AC_SUBST(LIB_JVM_DIR)
+])
diff --git a/hadoop-hdfs-project/pom.xml b/hadoop-hdfs-project/pom.xml
index ac08de5392b..984e460b4d0 100644
--- a/hadoop-hdfs-project/pom.xml
+++ b/hadoop-hdfs-project/pom.xml
@@ -33,6 +33,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     hadoop-hdfs
     hadoop-hdfs-httpfs
+    hadoop-hdfs/src/contrib/fuse-dfs
     hadoop-hdfs/src/contrib/bkjournal
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 22f84730695..c745db8a27f 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -47,37 +47,47 @@
-          org.apache.maven.plugins
-          maven-antrun-plugin
+          org.codehaus.mojo
+          make-maven-plugin
-              make
+              compile
               compile
-                run
-
-
-
-
-
-
-
-
-
-
-
+
+                autoreconf
+                configure
+                make-install
-              native_tests
+              test
               test
-
-
-
-
-
-
+
+                test
+
+
+
+                ${project.build.directory}/native/container-executor
+                -i
+
+
+
+
+
+                    CFLAGS
+                    -DHADOOP_CONF_DIR=${container-executor.conf.dir} ${container-executor.additional_cflags}
+
+
+                  ${project.build.directory}/native/container-executor
+                  /usr/local
+
+
+                  ${project.build.directory}/native/target
+
+
@@ -162,6 +172,14 @@
                 run
+
+
+
+
+
+
+
+
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
deleted file mode 100644
index e639507d262..00000000000
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
+++ /dev/null
@@ -1,66 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
-
-set(CMAKE_BUILD_TYPE, Release)
-
-if (JVM_ARCH_DATA_MODEL EQUAL 32)
-    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
-    set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
-endif (JVM_ARCH_DATA_MODEL EQUAL 32)
-
-function(output_directory TGT DIR)
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-endfunction(output_directory TGT DIR)
-
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2 -D_GNU_SOURCE")
-# note: can't enable -D_LARGEFILE: see MAPREDUCE-4258
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT")
-
-include_directories(
-    ${CMAKE_CURRENT_SOURCE_DIR}
-    ${CMAKE_BINARY_DIR}
-    main/native/container-executor
-    main/native/container-executor/impl
-)
-CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
-
-add_library(container
-    main/native/container-executor/impl/configuration.c
-    main/native/container-executor/impl/container-executor.c
-)
-
-add_executable(container-executor
-    main/native/container-executor/impl/main.c
-)
-target_link_libraries(container-executor
-    container
-)
-output_directory(container-executor target/usr/local/bin)
-
-add_executable(test-container-executor
-    main/native/container-executor/test/test-container-executor.c
-)
-target_link_libraries(test-container-executor
-    container
-)
-output_directory(test-container-executor target/usr/local/bin)
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
deleted file mode 100644
index 1fff36131f6..00000000000
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-#ifndef CONFIG_H
-#define CONFIG_H
-
-#cmakedefine HADOOP_CONF_DIR "@HADOOP_CONF_DIR@"
-
-#endif
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.autom4te.cfg b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.autom4te.cfg
new file mode 100644
index 00000000000..d21d1c9877a
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.autom4te.cfg
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# autom4te configuration for hadoop utils library
+#
+
+begin-language: "Autoheader-preselections"
+args: --no-cache
+end-language: "Autoheader-preselections"
+
+begin-language: "Automake-preselections"
+args: --no-cache
+end-language: "Automake-preselections"
+
+begin-language: "Autoreconf-preselections"
+args: --no-cache
+end-language: "Autoreconf-preselections"
+
+begin-language: "Autoconf-without-aclocal-m4"
+args: --no-cache
+end-language: "Autoconf-without-aclocal-m4"
+
+begin-language: "Autoconf"
+args: --no-cache
+end-language: "Autoconf"
+
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
new file mode 100644
index 00000000000..9ce06a81ea4
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
@@ -0,0 +1 @@
+# dummy
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
new file mode 100644
index 00000000000..4938bb2f53a
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+AM_CFLAGS=-I$(srcdir)/impl -Wall -g -Werror
+
+# Define the programs that need to be built
+bin_PROGRAMS = container-executor
+check_PROGRAMS = test-container-executor
+
+TESTS = test-container-executor
+
+# Define the sources for the common files
+common_SOURCES = impl/configuration.c impl/container-executor.c
+
+# Define the sources for the real executable
+container_executor_SOURCES = $(common_SOURCES) impl/main.c
+
+# Define the sources for the test executable
+test_container_executor_SOURCES = $(common_SOURCES) test/test-container-executor.c
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac
new file mode 100644
index 00000000000..db8af88cf12
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac
@@ -0,0 +1,54 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+AC_PREREQ(2.59)
+AC_INIT(linux-container-executor, 1.0.0, mapreduce-dev@hadoop.apache.org)
+AC_GNU_SOURCE
+#AC_SYS_LARGEFILE
+
+AM_INIT_AUTOMAKE([subdir-objects foreign no-dist])
+
+AC_CONFIG_SRCDIR([impl/container-executor.c])
+AC_CONFIG_FILES([Makefile])
+
+AC_PREFIX_DEFAULT(`pwd`/../install)
+
+CHECK_INSTALL_CFLAG
+HADOOP_UTILS_SETUP
+
+# Checks for programs.
+AC_PROG_CC
+AM_PROG_CC_C_O
+
+# Checks for libraries.
+
+# Checks for header files.
+AC_LANG(C)
+AC_CHECK_HEADERS([unistd.h])
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_HEADER_STDBOOL
+AC_C_CONST
+AC_TYPE_OFF_T
+AC_TYPE_SIZE_T
+AC_FUNC_STRERROR_R
+
+# Checks for library functions.
+AC_CHECK_FUNCS([mkdir uname])
+AC_OUTPUT
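The AC_CHECK_HEADERS([unistd.h]) probe above defines HAVE_UNISTD_H when the header is found; since this configure.ac declares no config header, the result reaches the compiler as a -DHAVE_UNISTD_H=1 flag rather than through a config.h. A minimal sketch of the conventional consumer-side guard; the program itself is illustrative, not part of this patch:

/* have_unistd.c - sketch: guarding code on autoconf's HAVE_* results */
#include <stdio.h>

#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif

int main(void) {
#ifdef HAVE_UNISTD_H
    /* unistd.h was detected at configure time, so getpid() is available. */
    printf("pid: %ld\n", (long) getpid());
#else
    printf("built without unistd.h\n");
#endif
    return 0;
}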
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
index d6ce5aa7061..cd8caabe333 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-#include "config.h"
 #include "configuration.h"
 #include "container-executor.h"
@@ -30,6 +29,8 @@
 #include
 #include
 
+#define _STRINGIFY(X) #X
+#define STRINGIFY(X) _STRINGIFY(X)
 #define CONF_FILENAME "container-executor.cfg"
 
 // When building as part of a Maven build this value gets defined by using
@@ -100,7 +101,7 @@ int main(int argc, char **argv) {
   char *executable_file = get_executable();
 
-  char *orig_conf_file = HADOOP_CONF_DIR "/" CONF_FILENAME;
+  char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) "/" CONF_FILENAME;
   char *conf_file = resolve_config_path(orig_conf_file, argv[0]);
 
   char *local_dirs, *log_dirs;
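The STRINGIFY pair added above is the standard two-level preprocessor idiom: the outer macro expands its argument first (here the unquoted path that the build passes via -DHADOOP_CONF_DIR=...), and only then does the inner # operator stringize it. This is what lets main.c drop the CMake-generated config.h, whose #cmakedefine had supplied HADOOP_CONF_DIR as an already-quoted string. A minimal sketch; the sample value and the fallback define are illustrative assumptions, not from this patch:

/* stringify_demo.c - sketch of two-level stringification
 * Illustrative build: cc -DHADOOP_CONF_DIR=../etc/hadoop stringify_demo.c */
#include <stdio.h>

#define _STRINGIFY(X) #X
#define STRINGIFY(X) _STRINGIFY(X)  /* expand X first, then stringize it */

#ifndef HADOOP_CONF_DIR
#define HADOOP_CONF_DIR /etc/hadoop  /* illustrative fallback so the sketch builds alone */
#endif

int main(void) {
    /* STRINGIFY(HADOOP_CONF_DIR) yields "/etc/hadoop" here, whereas
     * _STRINGIFY(HADOOP_CONF_DIR) alone would yield the literal "HADOOP_CONF_DIR". */
    printf("%s\n", STRINGIFY(HADOOP_CONF_DIR) "/" "container-executor.cfg");
    return 0;
}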