diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index a36b74d1e00..81420806136 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -536,31 +536,10 @@
             /usr/local
             ${snappy.prefix}/lib
             ${snappy.prefix}/include
+
-
-            org.apache.maven.plugins
-            maven-antrun-plugin
-
-
-            compile
-            compile
-
-            run
-
-
-
-
-
-
-
-
-
-
-
-
-
             org.codehaus.mojo
             native-maven-plugin
@@ -590,73 +569,27 @@
-            org.codehaus.mojo
-            make-maven-plugin
+            org.apache.maven.plugins
+            maven-antrun-plugin
-            compile
+            make
             compile
-
-            autoreconf
-            configure
-            make-install
-
+            run
+
+
+
+
+
+
+
+
+
+
+
-
-
-            ${project.build.directory}/native
-
-            -i
-            -f
-
-
-
-
-
-            OS_NAME
-            ${os.name}
-
-
-            OS_ARCH
-            ${os.arch}
-
-
-            JVM_DATA_MODEL
-            ${sun.arch.data.model}
-
-
-
-            CPPFLAGS=-I${snappy.include}
-            LDFLAGS=-L${snappy.lib}
-
-            ${project.build.directory}/native
-            /usr/local
-
-
-
-            OS_NAME
-            ${os.name}
-
-
-            OS_ARCH
-            ${os.arch}
-
-
-            JVM_DATA_MODEL
-            ${sun.arch.data.model}
-
-
-            HADOOP_NATIVE_SRCDIR
-            ${project.build.directory}/native
-
-
-
-
-            ${project.build.directory}/native/target
-
-
@@ -700,7 +633,7 @@
             maven-antrun-plugin
-                compile
+                kdc
                 compile
                 run
diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
new file mode 100644
index 00000000000..c6325311800
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
+
+# Default to release builds
+set(CMAKE_BUILD_TYPE Release)
+
+# If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
+# This variable is set by maven.
+if (JVM_ARCH_DATA_MODEL EQUAL 32)
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
+    set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
+    if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+        set(CMAKE_SYSTEM_PROCESSOR "i686")
+    endif ()
+endif (JVM_ARCH_DATA_MODEL EQUAL 32)
+
+# Compile a library with both shared and static variants
+function(add_dual_library LIBNAME)
+    add_library(${LIBNAME} SHARED ${ARGN})
+    add_library(${LIBNAME}_static STATIC ${ARGN})
+    set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
+endfunction(add_dual_library)
+
+# Link both a static and a dynamic target against some libraries
+function(target_link_dual_libraries LIBNAME)
+    target_link_libraries(${LIBNAME} ${ARGN})
+    target_link_libraries(${LIBNAME}_static ${ARGN})
+endfunction(target_link_dual_libraries)
+
+function(output_directory TGT DIR)
+    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+        RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+        ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+endfunction(output_directory TGT DIR)
+
+function(dual_output_directory TGT DIR)
+    output_directory(${TGT} "${DIR}")
+    output_directory(${TGT}_static "${DIR}")
+endfunction(dual_output_directory TGT DIR)
+
+if (NOT GENERATED_JAVAH)
+    # Must identify where the generated headers have been placed
+    MESSAGE(FATAL_ERROR "You must set the cmake variable GENERATED_JAVAH")
+endif (NOT GENERATED_JAVAH)
+find_package(JNI REQUIRED)
+find_package(ZLIB REQUIRED)
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
+set(D main/native/src/org/apache/hadoop)
+
+GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
+
+INCLUDE(CheckFunctionExists)
+INCLUDE(CheckCSourceCompiles)
+CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
+CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+
+find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
+find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
+if (SNAPPY_LIBRARY)
+    GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
+    set(SNAPPY_SOURCE_FILES
+        "${D}/io/compress/snappy/SnappyCompressor.c"
+        "${D}/io/compress/snappy/SnappyDecompressor.c")
+else (SNAPPY_LIBRARY)
+    set(SNAPPY_INCLUDE_DIR "")
+    set(SNAPPY_SOURCE_FILES "")
+endif (SNAPPY_LIBRARY)
+
+include_directories(
+    ${GENERATED_JAVAH}
+    main/native/src
+    ${CMAKE_CURRENT_SOURCE_DIR}
+    ${CMAKE_CURRENT_SOURCE_DIR}/src
+    ${CMAKE_BINARY_DIR}
+    ${JNI_INCLUDE_DIRS}
+    ${ZLIB_INCLUDE_DIRS}
+    ${SNAPPY_INCLUDE_DIR}
+)
+CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
+
+add_dual_library(hadoop
+    ${D}/io/compress/lz4/Lz4Compressor.c
+    ${D}/io/compress/lz4/Lz4Decompressor.c
+    ${D}/io/compress/lz4/lz4.c
+    ${SNAPPY_SOURCE_FILES}
+    ${D}/io/compress/zlib/ZlibCompressor.c
+    ${D}/io/compress/zlib/ZlibDecompressor.c
+    ${D}/io/nativeio/NativeIO.c
+    ${D}/io/nativeio/errno_enum.c
+    ${D}/io/nativeio/file_descriptor.c
+    ${D}/security/JniBasedUnixGroupsMapping.c
+    ${D}/security/JniBasedUnixGroupsNetgroupMapping.c
+    ${D}/security/getGroup.c
+    ${D}/util/NativeCrc32.c
+    ${D}/util/bulk_crc32.c
+)
+target_link_dual_libraries(hadoop
+    dl
+    ${JAVA_JVM_LIBRARY}
+)
+SET(LIBHADOOP_VERSION "1.0.0")
+SET_TARGET_PROPERTIES(hadoop PROPERTIES
+    SOVERSION ${LIBHADOOP_VERSION})
+dual_output_directory(hadoop target/usr/local/lib)
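The add_dual_library / dual_output_directory helpers above reproduce what libtool did implicitly: every build emits both libhadoop.so and libhadoop.a with the same base name, side by side. A minimal, standalone sketch of the same pattern (the project and file names here are illustrative, not part of this patch):

    cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
    project(dual-demo C)

    # One source list, two targets: libdemo.so and libdemo.a.
    add_library(demo SHARED demo.c)
    add_library(demo_static STATIC demo.c)
    # Without OUTPUT_NAME the static archive would be named libdemo_static.a.
    set_target_properties(demo_static PROPERTIES OUTPUT_NAME demo)

    # Both artifacts land in the same directory, mirroring what
    # dual_output_directory() does for target/usr/local/lib above.
    set_target_properties(demo demo_static PROPERTIES
        ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")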
diff --git a/hadoop-common-project/hadoop-common/src/config.h.cmake b/hadoop-common-project/hadoop-common/src/config.h.cmake
new file mode 100644
index 00000000000..9098b68b87e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/config.h.cmake
@@ -0,0 +1,10 @@
+#ifndef CONFIG_H
+#define CONFIG_H
+
+#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
+#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@"
+#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
+#cmakedefine HAVE_SYNC_FILE_RANGE
+#cmakedefine HAVE_POSIX_FADVISE
+
+#endif
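The template above is rendered by the CONFIGURE_FILE() call in CMakeLists.txt; #cmakedefine emits a #define when the corresponding CMake variable is set and a commented-out #undef when it is not. A hypothetical rendering, assuming zlib was found and only the sync_file_range check passed:

    # Results the checks in CMakeLists.txt might have produced:
    set(HADOOP_ZLIB_LIBRARY "libz.so.1")  # get_filename_component() of the found zlib
    set(HAVE_SYNC_FILE_RANGE 1)           # CHECK_FUNCTION_EXISTS succeeded
    unset(HAVE_POSIX_FADVISE)             # CHECK_FUNCTION_EXISTS failed

    configure_file(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)

    # The generated config.h would then read:
    #   #define HADOOP_ZLIB_LIBRARY "libz.so.1"
    #   #define HAVE_SYNC_FILE_RANGE
    #   /* #undef HAVE_POSIX_FADVISE */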
diff --git a/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg b/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg
deleted file mode 100644
index a69c197883f..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# autom4te configuration for hadoop-native library
-#
-
-begin-language: "Autoheader-preselections"
-args: --no-cache
-end-language: "Autoheader-preselections"
-
-begin-language: "Automake-preselections"
-args: --no-cache
-end-language: "Automake-preselections"
-
-begin-language: "Autoreconf-preselections"
-args: --no-cache
-end-language: "Autoreconf-preselections"
-
-begin-language: "Autoconf-without-aclocal-m4"
-args: --no-cache
-end-language: "Autoconf-without-aclocal-m4"
-
-begin-language: "Autoconf"
-args: --no-cache
-end-language: "Autoconf"
-
diff --git a/hadoop-common-project/hadoop-common/src/main/native/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/Makefile.am
deleted file mode 100644
index c4ca564c2be..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/Makefile.am
+++ /dev/null
@@ -1,66 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# Notes:
-# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os-arch}.
-# 2. This makefile depends on the following environment variables to function correctly:
-#    * HADOOP_NATIVE_SRCDIR
-#    * JAVA_HOME
-#    * JVM_DATA_MODEL
-#    * OS_NAME
-#    * OS_ARCH
-#    All these are setup by build.xml.
-#
-
-# Export $(PLATFORM) to prevent proliferation of sub-shells
-export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
-
-ACLOCAL_AMFLAGS = -I m4
-AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
-              -I$(HADOOP_NATIVE_SRCDIR)/javah
-AM_LDFLAGS = @JNI_LDFLAGS@
-AM_CFLAGS = -g -Wall -fPIC -O2
-if SPECIFY_DATA_MODEL
-AM_LDFLAGS += -m$(JVM_DATA_MODEL)
-AM_CFLAGS += -m$(JVM_DATA_MODEL)
-endif
-
-lib_LTLIBRARIES = libhadoop.la
-libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
-                       src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
-                       src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c \
-                       src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c \
-                       src/org/apache/hadoop/io/compress/lz4/lz4.c \
-                       src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c \
-                       src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c \
-                       src/org/apache/hadoop/security/getGroup.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
-                       src/org/apache/hadoop/io/nativeio/file_descriptor.c \
-                       src/org/apache/hadoop/io/nativeio/errno_enum.c \
-                       src/org/apache/hadoop/io/nativeio/NativeIO.c \
-                       src/org/apache/hadoop/util/NativeCrc32.c \
-                       src/org/apache/hadoop/util/bulk_crc32.c
-
-libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
-libhadoop_la_LIBADD = -ldl -ljvm
-
-#
-#vim: sw=4: ts=4: noet
-#
diff --git a/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4 b/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4
deleted file mode 100644
index 93e05b8148d..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/acinclude.m4
+++ /dev/null
@@ -1,28 +0,0 @@
-# AC_COMPUTE_NEEDED_DSO(LIBRARY, TEST_PROGRAM, PREPROC_SYMBOL)
-# --------------------------------------------------
-# Compute the 'actual' dynamic-library used
-# for LIBRARY and set it to PREPROC_SYMBOL
-AC_DEFUN([AC_COMPUTE_NEEDED_DSO],
-[
-AC_CACHE_CHECK([Checking for the 'actual' dynamic-library for '-l$1'], ac_cv_libname_$1,
-  [
-  echo '$2' > conftest.c
-  if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -l$1 2>&1`"; then
-    dnl Try objdump and ldd in that order to get the dynamic library
-    if test ! -z "`which objdump | grep -v 'no objdump'`"; then
-      ac_cv_libname_$1="`objdump -p conftest | grep NEEDED | grep $1 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
-    elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
-      ac_cv_libname_$1="`ldd conftest | grep $1 | sed 's/^[[[^A-Za-z0-9]]]*\([[[A-Za-z0-9\.]]]*\)[[[^A-Za-z0-9]]]*=>.*$/\"\1\"/'`"
-    elif test ! -z "`which otool | grep -v 'no otool'`"; then
-      ac_cv_libname_$1=\"`otool -L conftest | grep $1 | sed -e 's/^[ ]*//' -e 's/ .*//' -e 's/.*\/\(.*\)$/\1/'`\";
-    else
-      AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' or 'otool' to compute the dynamic library for '-l$1')
-    fi
-  else
-    ac_cv_libname_$1=libnotfound.so
-  fi
-  rm -f conftest*
-  ]
-)
-AC_DEFINE_UNQUOTED($3, ${ac_cv_libname_$1}, [The 'actual' dynamic-library for '-l$1'])
-])# AC_COMPUTE_NEEDED_DSO
diff --git a/hadoop-common-project/hadoop-common/src/main/native/configure.ac b/hadoop-common-project/hadoop-common/src/main/native/configure.ac
deleted file mode 100644
index 34408d64182..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/configure.ac
+++ /dev/null
@@ -1,130 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# configure.ac for hadoop native code.
-#
-
-# Notes:
-# 1. This configure.ac depends on the following environment variables to function correctly:
-#    * HADOOP_NATIVE_SRCDIR
-#    * JAVA_HOME
-#    * JVM_DATA_MODEL
-#    * OS_NAME
-#    * OS_ARCH
-#    All these are setup by build.xml.
-
-# -*- Autoconf -*-
-# Process this file with autoconf to produce a configure script.
-#
-
-AC_PREREQ(2.59)
-AC_INIT(src/org_apache_hadoop.h)
-AC_CONFIG_SRCDIR([src/org_apache_hadoop.h])
-AC_CONFIG_AUX_DIR([config])
-AC_CONFIG_MACRO_DIR([m4])
-AC_CONFIG_HEADER([config.h])
-AC_SYS_LARGEFILE
-AC_GNU_SOURCE
-
-AM_INIT_AUTOMAKE(hadoop,1.0.0)
-
-# Checks for programs.
-AC_PROG_CC
-AC_PROG_LIBTOOL
-
-# Checks for libraries.
-dnl Check for '-ldl'
-AC_CHECK_LIB([dl], [dlopen])
-
-dnl Check for '-ljvm'
-JNI_LDFLAGS=""
-if test $JAVA_HOME != ""
-then
-  JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
-  JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1`
-  JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`"
-fi
-LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
-AC_CHECK_LIB([jvm], [JNI_GetCreatedJavaVMs])
-AC_SUBST([JNI_LDFLAGS])
-
-# Checks for header files.
-dnl Check for Ansi C headers
-AC_HEADER_STDC
-
-dnl Check for other standard C headers
-AC_CHECK_HEADERS([stdio.h stddef.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
-
-dnl Check for JNI headers
-JNI_CPPFLAGS=""
-if test $JAVA_HOME != ""
-then
-  for dir in `find $JAVA_HOME/include -follow -type d`
-  do
-    JNI_CPPFLAGS="$JNI_CPPFLAGS -I$dir"
-  done
-fi
-cppflags_bak=$CPPFLAGS
-CPPFLAGS="$CPPFLAGS $JNI_CPPFLAGS"
-AC_CHECK_HEADERS([jni.h], [], AC_MSG_ERROR([Native java headers not found. Is \$JAVA_HOME set correctly?]))
-CPPFLAGS=$cppflags_bak
-AC_SUBST([JNI_CPPFLAGS])
-
-dnl Check for zlib headers
-AC_CHECK_HEADERS([zlib.h zconf.h],
-  AC_COMPUTE_NEEDED_DSO(z,
-  [#include "zlib.h"
-  int main(int argc, char **argv){zlibVersion();return 0;}],
-  HADOOP_ZLIB_LIBRARY),
-  AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
-
-dnl Check for snappy headers
-AC_CHECK_HEADERS([snappy-c.h],
-  AC_COMPUTE_NEEDED_DSO(snappy,
-  [#include "snappy-c.h"
-  int main(int argc, char **argv){snappy_compress(0,0,0,0);return 0;}],
-  HADOOP_SNAPPY_LIBRARY),
-  AC_MSG_WARN(Snappy headers were not found... building without snappy.))
-
-dnl Check for headers needed by the native Group resolution implementation
-AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
-
-dnl check for posix_fadvise
-AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(posix_fadvise)])
-
-dnl check for sync_file_range
-AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(sync_file_range)])
-
-# Checks for typedefs, structures, and compiler characteristics.
-AC_C_CONST
-
-# Checks for library functions.
-AC_CHECK_FUNCS([memset])
-
-# Check for nonstandard STRERROR_R
-AC_FUNC_STRERROR_R
-
-AM_CONDITIONAL([SPECIFY_DATA_MODEL], [case $host_cpu in arm*) false;; *) true;; esac])
-
-AC_CONFIG_FILES([Makefile])
-AC_OUTPUT
-
-#
-#vim: sw=2: ts=2: noet
-#
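With configure.ac gone, each autoconf probe it performed has a CMake counterpart in the new CMakeLists.txt. Roughly, the correspondence is (a sketch, not an exhaustive mapping):

    include(CheckFunctionExists)

    # AC_CHECK_FUNCS(posix_fadvise) / AC_CHECK_FUNCS(sync_file_range)
    check_function_exists(posix_fadvise HAVE_POSIX_FADVISE)
    check_function_exists(sync_file_range HAVE_SYNC_FILE_RANGE)

    # AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z, ...), ...)
    # The objdump/ldd dance is replaced by asking CMake for the library it
    # found and keeping only its file name for the runtime dlopen():
    find_package(ZLIB REQUIRED)
    get_filename_component(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)

    # AC_CHECK_HEADERS([snappy-c.h], ..., AC_MSG_WARN(...)) -- still optional:
    find_library(SNAPPY_LIBRARY NAMES snappy)
    if (NOT SNAPPY_LIBRARY)
      message(STATUS "Building without snappy")
    endif ()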
diff --git a/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am
deleted file mode 100644
index 9b536ff440c..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# Makefile template for building libhadoop.so
-#
-
-#
-# Notes:
-# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/lib
-# 2. This makefile depends on the following environment variables to function correctly:
-#    * HADOOP_NATIVE_SRCDIR
-#    * JAVA_HOME
-#    * OS_ARCH
-#    All these are setup by build.xml and/or the top-level makefile.
-#
-
-# Add .lo files in $(SUBDIRS) to construct libhadoop.so
-HADOOP_OBJS = $(foreach path,$(addprefix ../,$(SUBDIRS)),$(wildcard $(path)/*.lo))
-AM_LDFLAGS = @JNI_LDFLAGS@
-if SPECIFY_DATA_MODEL
-AM_LDFLAGS += -m$(JVM_DATA_MODEL)
-endif
-
-lib_LTLIBRARIES = libhadoop.la
-libhadoop_la_SOURCES =
-libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
-libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
-
-#
-#vim: sw=4: ts=4: noet
-#
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
index d52a4f6b2a3..641ecd73b7a 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
@@ -16,10 +16,7 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
+#include "config.h"
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
index 547b027cc14..3eebc1859d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
@@ -16,10 +16,7 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
+#include "config.h"
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
index 13991c23f4f..96a2402ae7a 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
@@ -16,36 +16,12 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HADOOP_SNAPPY_LIBRARY
-
-#if defined HAVE_STDIO_H
-  #include <stdio.h>
-#else
-  #error 'stdio.h not found'
-#endif
-
-#if defined HAVE_STDLIB_H
-  #include <stdlib.h>
-#else
-  #error 'stdlib.h not found'
-#endif
-
-#if defined HAVE_STRING_H
-  #include <string.h>
-#else
-  #error 'string.h not found'
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error 'dlfcn.h not found'
-#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <dlfcn.h>
+#include "config.h"
 
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
@@ -123,5 +99,3 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
   return (jint)compressed_direct_buf_len;
 }
-
-#endif //define HADOOP_SNAPPY_LIBRARY
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
index 767c5f4b313..a5f07ca5566 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
@@ -16,36 +16,12 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HADOOP_SNAPPY_LIBRARY
-
-#if defined HAVE_STDIO_H
-  #include <stdio.h>
-#else
-  #error 'stdio.h not found'
-#endif
-
-#if defined HAVE_STDLIB_H
-  #include <stdlib.h>
-#else
-  #error 'stdlib.h not found'
-#endif
-
-#if defined HAVE_STRING_H
-  #include <string.h>
-#else
-  #error 'string.h not found'
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error 'dlfcn.h not found'
-#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <dlfcn.h>
+#include "config.h"
 
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
@@ -127,5 +103,3 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres
   return (jint)uncompressed_direct_buf_len;
 }
-
-#endif //define HADOOP_SNAPPY_LIBRARY
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
index 815e0306736..3e99d5d20d2 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
@@ -17,42 +17,13 @@
  */
 
-#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
+#ifndef ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
 #define ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
-
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HADOOP_SNAPPY_LIBRARY
-
-  #if defined HAVE_STDDEF_H
-    #include <stddef.h>
-  #else
-    #error 'stddef.h not found'
-  #endif
-
-  #if defined HAVE_SNAPPY_C_H
-    #include <snappy-c.h>
-  #else
-    #error 'Please install snappy-development packages for your platform.'
-  #endif
-
-  #if defined HAVE_DLFCN_H
-    #include <dlfcn.h>
-  #else
-    #error "dlfcn.h not found"
-  #endif
-
-  #if defined HAVE_JNI_H
-    #include <jni.h>
-  #else
-    #error 'jni.h not found'
-  #endif
-
-  #include "org_apache_hadoop.h"
-
-#endif //define HADOOP_SNAPPY_LIBRARY
+#include "org_apache_hadoop.h"
+#include <stddef.h>
+#include <snappy-c.h>
+#include <dlfcn.h>
+#include <jni.h>
 
 #endif //ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
deleted file mode 100644
index 821f33f0527..00000000000
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
+++ /dev/null
@@ -1,53 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# Makefile template for building native 'zlib' for hadoop.
-#
-
-#
-# Notes:
-# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/$(subdir) .
-# 2. This makefile depends on the following environment variables to function correctly:
-#    * HADOOP_NATIVE_SRCDIR
-#    * JAVA_HOME
-#    * JVM_DATA_MODEL
-#    * OS_ARCH
-#    * PLATFORM
-#    All these are setup by build.xml and/or the top-level makefile.
-# 3. The creation of requisite jni headers/stubs are also done by build.xml and they are
-#    assumed to be in $(HADOOP_PREFIX)/build/native/src/org/apache/hadoop/io/compress/zlib.
-#
-
-# The 'vpath directive' to locate the actual source files
-vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
-
-AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
-AM_LDFLAGS = @JNI_LDFLAGS@
-AM_CFLAGS = -g -Wall -fPIC -O2
-if SPECIFY_DATA_MODEL
-AM_CFLAGS += -m$(JVM_DATA_MODEL)
-endif
-
-noinst_LTLIBRARIES = libnativezlib.la
-libnativezlib_la_SOURCES = ZlibCompressor.c ZlibDecompressor.c
-libnativezlib_la_LIBADD = -ldl -ljvm
-
-#
-#vim: sw=4: ts=4: noet
-#
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
index 9ada3f03b05..689c783ef7e 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
@@ -16,34 +16,12 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HAVE_STDIO_H
-  #include <stdio.h>
-#else
-  #error 'stdio.h not found'
-#endif
-
-#if defined HAVE_STDLIB_H
-  #include <stdlib.h>
-#else
-  #error 'stdlib.h not found'
-#endif
-
-#if defined HAVE_STRING_H
-  #include <string.h>
-#else
-  #error 'string.h not found'
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error 'dlfcn.h not found'
-#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <dlfcn.h>
+#include "config.h"
 
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
index 3047dba2672..6abe36381f1 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
@@ -16,34 +16,12 @@
  * limitations under the License.
  */
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HAVE_STDIO_H
-  #include <stdio.h>
-#else
-  #error 'stdio.h not found'
-#endif
-
-#if defined HAVE_STDLIB_H
-  #include <stdlib.h>
-#else
-  #error 'stdlib.h not found'
-#endif
-
-#if defined HAVE_STRING_H
-  #include <string.h>
-#else
-  #error 'string.h not found'
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error 'dlfcn.h not found'
-#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <dlfcn.h>
+#include "config.h"
 
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
index 16b607b4a91..c53aa531c99 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
@@ -19,40 +19,13 @@
 #if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 #define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HAVE_STDDEF_H
-  #include <stddef.h>
-#else
-  #error 'stddef.h not found'
-#endif
-
-#if defined HAVE_ZLIB_H
-  #include <zlib.h>
-#else
-  #error 'Please install zlib-development packages for your platform.'
-#endif
-
-#if defined HAVE_ZCONF_H
-  #include <zconf.h>
-#else
-  #error 'Please install zlib-development packages for your platform.'
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error "dlfcn.h not found"
-#endif
-
-#if defined HAVE_JNI_H
-  #include <jni.h>
-#else
-  #error 'jni.h not found'
-#endif
+#include <stddef.h>
+#include <zlib.h>
+#include <zconf.h>
+#include <dlfcn.h>
+#include <jni.h>
+#include "config.h"
 
 #include "org_apache_hadoop.h"
 
 /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
index fbcf9563ee4..c08ea037d9f 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
@@ -16,9 +16,6 @@
  * limitations under the License.
  */
 
-// get the autoconf settings
-#include "config.h"
-
 #include
 #include
 #include
@@ -32,6 +29,7 @@
 #include
 #include
 
+#include "config.h"
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_nativeio_NativeIO.h"
 #include "file_descriptor.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
index 869c2ba2e8e..dd51c0a2578 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
@@ -16,9 +16,6 @@
  * limitations under the License.
  */
 
-// get the autoconf settings
-#include "config.h"
-
 #include
 #include
 #include
@@ -26,6 +23,7 @@
 #include
 #include
 
+#include "config.h"
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_util_NativeCrc32.h"
 #include "gcc_optimizations.h"
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h b/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
index 7a777c2f4f0..a50c41dbbb4 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
@@ -24,21 +24,10 @@
 #if !defined ORG_APACHE_HADOOP_H
 #define ORG_APACHE_HADOOP_H
 
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
+#include <dlfcn.h>
+#include <jni.h>
 
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error "dlfcn.h not found"
-#endif
-
-#if defined HAVE_JNI_H
-  #include <jni.h>
-#else
-  #error 'jni.h not found'
-#endif
+#include "config.h"
 
 /* A helper macro to 'throw' a java exception. */
 #define THROW(env, exception_name, message) \
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index ebff0f57f59..c775c51e3ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -415,76 +415,22 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
             maven-antrun-plugin
-                compile
+                make
                 compile
-
-                run
-
+                run
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-            org.codehaus.mojo
-            make-maven-plugin
-
-
-                compile
-                compile
-
-                autoreconf
-                configure
-                make-install
-
-
-
-            ${project.build.directory}/native
-
-                -i
-                -f
-
-
-
-
-
-                ac_cv_func_malloc_0_nonnull
-                yes
-
-
-                JVM_ARCH
-                ${sun.arch.data.model}
-
-
-
-
-            ${project.build.directory}/native
-            /usr/local
-
-
-
-
-                ac_cv_func_malloc_0_nonnull
-                yes
-
-
-                JVM_ARCH
-                ${sun.arch.data.model}
-
-
-
-
-            ${project.build.directory}/native/target
-
-
-
-
-
- 4.0.0
-
- org.apache.hadoop
- hadoop-project
- 3.0.0-SNAPSHOT
- ../../../../../hadoop-project
-
- org.apache.hadoop.contrib
- hadoop-hdfs-fuse
- 3.0.0-SNAPSHOT
- pom
-
- Apache Hadoop HDFS Fuse
- Apache Hadoop HDFS Fuse
-
-
-
- org.apache.hadoop
- hadoop-hdfs
- compile
-
-
- org.apache.hadoop
- hadoop-hdfs
- test
- test-jar
-
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-eclipse-plugin
- 2.6
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
-
- 1
-
-
-
- org.apache.maven.plugins
- maven-javadoc-plugin
-
-
-
- javadoc
-
- site
-
- true
- true
- false
- ${maven.compile.source}
- ${maven.compile.encoding}
-
-
- HttpFs API
- *
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-project-info-reports-plugin
-
-
-
- false
-
-
- dependencies
-
- site
-
-
-
-
- org.apache.rat
- apache-rat-plugin
-
-
-
-
-
-
-
-
-
-
- fuse
-
- false
-
-
-
-
- org.apache.maven.plugins
- maven-antrun-plugin
-
-
- prepare-compile-native
- generate-sources
-
- run
-
-
-
-
-
-
-
-
-
-
- compile-fuse
- compile
-
- run
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt
new file mode 100644
index 00000000000..fb3c580e94c
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt
@@ -0,0 +1,73 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Find Linux FUSE
+IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
+    find_package(PkgConfig REQUIRED)
+    pkg_check_modules(FUSE fuse)
+    IF(FUSE_FOUND)
+        FLATTEN_LIST("${FUSE_CFLAGS}" " " FUSE_CFLAGS)
+        FLATTEN_LIST("${FUSE_LDFLAGS}" " " FUSE_LDFLAGS)
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FUSE_CFLAGS}")
+        set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} ${FUSE_LDFLAGS}")
+        MESSAGE(STATUS "Building Linux FUSE client.")
+        include_directories(${FUSE_INCLUDE_DIRS})
+    ELSE(FUSE_FOUND)
+        MESSAGE(STATUS "Failed to find Linux FUSE libraries or include files.  Will not build FUSE client.")
+    ENDIF(FUSE_FOUND)
+ELSE (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
+    MESSAGE(STATUS "Non-Linux system detected.  Will not build FUSE client.")
+ENDIF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
+
+IF(FUSE_FOUND)
+    add_executable(fuse_dfs
+        fuse_dfs.c
+        fuse_options.c
+        fuse_connect.c
+        fuse_impls_access.c
+        fuse_impls_chmod.c
+        fuse_impls_chown.c
+        fuse_impls_create.c
+        fuse_impls_flush.c
+        fuse_impls_getattr.c
+        fuse_impls_mkdir.c
+        fuse_impls_mknod.c
+        fuse_impls_open.c
+        fuse_impls_read.c
+        fuse_impls_readdir.c
+        fuse_impls_release.c
+        fuse_impls_rename.c
+        fuse_impls_rmdir.c
+        fuse_impls_statfs.c
+        fuse_impls_symlink.c
+        fuse_impls_truncate.c
+        fuse_impls_unlink.c
+        fuse_impls_utimens.c
+        fuse_impls_write.c
+        fuse_init.c
+        fuse_stat_struct.c
+        fuse_trash.c
+        fuse_users.c
+    )
+    target_link_libraries(fuse_dfs
+        ${FUSE_LIBRARIES}
+        ${JAVA_JVM_LIBRARY}
+        hdfs
+        m
+    )
+ENDIF(FUSE_FOUND)
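pkg_check_modules() returns FUSE_CFLAGS and FUSE_LDFLAGS as CMake lists (semicolon-separated), while CMAKE_C_FLAGS must be a single space-separated string; that is all FLATTEN_LIST does here. The macro itself is not defined in this hunk and presumably lives in a shared CMake module elsewhere in the tree; a hypothetical equivalent definition:

    # Turn a ;-list into a single string joined by SEP, in the caller's scope.
    macro(FLATTEN_LIST LIST SEP OUT)
      string(REPLACE ";" "${SEP}" ${OUT} "${LIST}")
    endmacro()

    # Example: pkg-config may report "-D_FILE_OFFSET_BITS=64;-I/usr/include/fuse",
    # which flattens to "-D_FILE_OFFSET_BITS=64 -I/usr/include/fuse" before it is
    # appended to CMAKE_C_FLAGS above.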
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
deleted file mode 100644
index 706297f314e..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-bin_PROGRAMS = fuse_dfs
-fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
-AM_CFLAGS= -Wall -g
-AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/../../src/main/native -I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
-AM_LDFLAGS= -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib64 -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib -L$(FUSE_HOME)/lib -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server
-fuse_dfs_LDADD=-lfuse -lhdfs -ljvm -lm
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
index 56ed9cb1738..4554dbdbea5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
@@ -31,13 +31,9 @@
 #include
 #include
 
-#ifdef HAVE_CONFIG_H
-#include <config.h>
-#endif
-
-#ifdef HAVE_SETXATTR
 #include <sys/xattr.h>
-#endif
+
+#include "config.h"
 
 //
 // Check if a path is in the mount option supplied protected paths.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
deleted file mode 100644
index 8bbd627315f..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-@PRODUCT_MK@
-
-#AM_CPPFLAGS = -I$(top_srcdir)
-ACLOCAL_AMFLAGS = -I m4
-
-lib_LTLIBRARIES = libhdfs.la
-libhdfs_la_SOURCES = hdfs.c hdfsJniHelper.c hdfs.h
-
-#check_PROGRAMS = hdfs_test hdfs_read hdfs_write
-check_PROGRAMS = hdfs_test hdfs_read hdfs_write
-
-hdfs_test_SOURCES = hdfs_test.c hdfs.h
-hdfs_test_LDADD = ${libdir}/libhdfs.la
-
-hdfs_read_SOURCES = hdfs_read.c
-hdfs_read_LDADD = ${libdir}/libhdfs.la
-
-hdfs_write_SOURCES = hdfs_write.c
-hdfs_write_LDADD = ${libdir}/libhdfs.la
-
-test: hdfs_test hdfs_read hdfs_write
-	${LIBHDFS_SRC_DIR}/tests/test-libhdfs.sh
-
-
-# vim: sw=4: ts=4: noet
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
deleted file mode 100644
index d801fc47385..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
+++ /dev/null
@@ -1,125 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Autoconf input file
-# $Id$
-
-AC_INIT([libhdfs], [0.1.0], omalley@apache.org)
-AC_PREFIX_DEFAULT([`pwd`/../install])
-AC_CONFIG_AUX_DIR([config])
-
-# Generates Makefile from Makefile.am. Modify when new subdirs are added.
-# Change Makefile.am also to add subdirectly.
-AM_INIT_AUTOMAKE(foreign no-dist)
-AC_CONFIG_FILES(Makefile)
-
-LT_INIT
-
-AC_CONFIG_MACRO_DIR([m4])
-dnl -------------------------------------------------------------------------
-dnl Check current host (forget about cross compilation) and validate it
-dnl against the cache (fail if the cache differs)
-dnl -------------------------------------------------------------------------
-AP_MSG_HEADER([Current host])
-AC_CANONICAL_HOST()
-AP_CANONICAL_HOST_CHECK()
-
-dnl -------------------------------------------------------------------------
-dnl Check C environment
-dnl -------------------------------------------------------------------------
-AP_MSG_HEADER([C-Language compilation tools])
-AC_PROG_CC()
-AC_CHECK_TOOL(RANLIB, ranlib, :)
-
-dnl -------------------------------------------------------------------------
-dnl Check if this host is supported
-dnl -------------------------------------------------------------------------
-AP_MSG_HEADER([Host support])
-AP_SUPPORTED_HOST()
-if test "$supported_os" = "darwin"
-then
-  if test -z "$JAVA_HOME" -a -d /System/Library/Frameworks/JavaVM.framework/Home; then
-    JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home
-  fi
-
-  _prevdir=`/bin/pwd`
-  if test -n "$JAVA_HOME" -a -d "$JAVA_HOME/include"; then
-    cd "$JAVA_HOME/include"
-  elif test -n "$JAVA_HOME" -a -d "$JAVA_HOME/../Headers"; then
-    cd "$JAVA_HOME/../Headers"
-  else
-    cd /System/Library/Frameworks/JavaVM.framework/Headers
-  fi
-  CFLAGS="$CFLAGS -m${JVM_ARCH} -I`/bin/pwd -P`"
-  cd $_prevdir
-  unset _prevdir
-fi
-
-dnl -------------------------------------------------------------------------
-dnl Check JAVA environment
-dnl -------------------------------------------------------------------------
-AP_MSG_HEADER([Java compilation tools])
-AP_JAVA()
-AP_SABLEVM()
-AP_KAFFE()
-AP_PROG_JAVAC()
-AP_PROG_JAR()
-AP_JVM_LIBDIR()
-if test "$supported_os" != "darwin"
-then
-  case $host_cpu in
-  arm*) ;;
-  *)
-    CFLAGS="$CFLAGS -m${JVM_ARCH}"
-    LDFLAGS="$LDFLAGS -m${JVM_ARCH}"
-    ;;
-  esac
-  AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH])
-  CFLAGS="$CFLAGS -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os"
-  LDFLAGS="$LDFLAGS -L$LIB_JVM_DIR -ljvm -Wl,-x"
-fi
-
-dnl -------------------------------------------------------------------------
-dnl Add gcc specific CFLAGS.
-dnl -------------------------------------------------------------------------
-if test "$GCC" = "yes"
-then
-  CFLAGS="$CFLAGS -Wall -Wstrict-prototypes"
-  AC_MSG_RESULT([gcc flags added])
-fi
-dnl -------------------------------------------------------------------------
-dnl Add gcc specific CFLAGS.
-dnl -------------------------------------------------------------------------
-if test -z "$LDCMD"
-then
-  LDCMD="$CC"
-fi
-AC_SUBST(LDCMD)
-
-
-AC_PROG_CC
-AC_PROG_LIBTOOL
-
-AC_TYPE_SIZE_T
-AC_CHECK_FUNCS([strdup strerror strtoul])
-AC_CHECK_HEADERS([fcntl.h])
-AC_C_CONST
-AC_C_VOLATILE
-#AC_FUNC_MALLOC
-AC_HEADER_STDBOOL
-AC_SUBST(PRODUCT_MK)
-AC_OUTPUT
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4
deleted file mode 100644
index cb5938ffca5..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4
+++ /dev/null
@@ -1,41 +0,0 @@
-dnl
-dnl Licensed to the Apache Software Foundation (ASF) under one or more
-dnl contributor license agreements.  See the NOTICE file distributed with
-dnl this work for additional information regarding copyright ownership.
-dnl The ASF licenses this file to You under the Apache License, Version 2.0
-dnl (the "License"); you may not use this file except in compliance with
-dnl the License.  You may obtain a copy of the License at
-dnl
-dnl     http://www.apache.org/licenses/LICENSE-2.0
-dnl
-dnl Unless required by applicable law or agreed to in writing, software
-dnl distributed under the License is distributed on an "AS IS" BASIS,
-dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-dnl See the License for the specific language governing permissions and
-dnl limitations under the License.
-dnl
-
-dnl -------------------------------------------------------------------------
-dnl Author  Pier Fumagalli
-dnl Version $Id$
-dnl -------------------------------------------------------------------------
-
-AC_DEFUN([AP_MSG_HEADER],[
-  printf "*** %s ***\n" "$1" 1>&2
-  AC_PROVIDE([$0])
-])
-
-AC_DEFUN([AP_CANONICAL_HOST_CHECK],[
-  AC_MSG_CHECKING([cached host system type])
-  if { test x"${ac_cv_host_system_type+set}" = x"set"  &&
-       test x"$ac_cv_host_system_type" != x"$host" ; }
-  then
-    AC_MSG_RESULT([$ac_cv_host_system_type])
-    AC_MSG_ERROR([remove the \"$cache_file\" file and re-run configure])
-  else
-    AC_MSG_RESULT(ok)
-    ac_cv_host_system_type="$host"
-  fi
-  AC_PROVIDE([$0])
-])
-
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4
deleted file mode 100644
index 993fc5bed93..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4
+++ /dev/null
@@ -1,142 +0,0 @@
-dnl
-dnl Licensed to the Apache Software Foundation (ASF) under one or more
-dnl contributor license agreements.  See the NOTICE file distributed with
-dnl this work for additional information regarding copyright ownership.
-dnl The ASF licenses this file to You under the Apache License, Version 2.0
-dnl (the "License"); you may not use this file except in compliance with
-dnl the License.  You may obtain a copy of the License at
-dnl
-dnl     http://www.apache.org/licenses/LICENSE-2.0
-dnl
-dnl Unless required by applicable law or agreed to in writing, software
-dnl distributed under the License is distributed on an "AS IS" BASIS,
-dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-dnl See the License for the specific language governing permissions and
-dnl limitations under the License.
-dnl
-
-dnl -------------------------------------------------------------------------
-dnl Author  Pier Fumagalli
-dnl Version $Id$
-dnl -------------------------------------------------------------------------
-
-AC_DEFUN([AP_PROG_JAVAC_WORKS],[
-  AC_CACHE_CHECK([wether the Java compiler ($JAVAC) works],ap_cv_prog_javac_works,[
-    echo "public class Test {}" > Test.java
-    $JAVAC $JAVACFLAGS Test.java > /dev/null 2>&1
-    if test $? -eq 0
-    then
-      rm -f Test.java Test.class
-      ap_cv_prog_javac_works=yes
-    else
-      rm -f Test.java Test.class
-      AC_MSG_RESULT(no)
-      AC_MSG_ERROR([installation or configuration problem: javac cannot compile])
-    fi
-  ])
-])
-
-dnl Check for JAVA compilers.
-AC_DEFUN([AP_PROG_JAVAC],[
-  if test "$SABLEVM" != "NONE"
-  then
-    AC_PATH_PROG(JAVACSABLE,javac-sablevm,NONE,$JAVA_HOME/bin)
-  else
-    JAVACSABLE="NONE"
-  fi
-  if test "$JAVACSABLE" = "NONE"
-  then
-    XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
-    AC_PATH_PROG(JAVAC,javac,NONE,$XPATH)
-  else
-    AC_PATH_PROG(JAVAC,javac-sablevm,NONE,$JAVA_HOME/bin)
-  fi
-  AC_MSG_RESULT([$JAVAC])
-  if test "$JAVAC" = "NONE"
-  then
-    AC_MSG_ERROR([javac not found])
-  fi
-  AP_PROG_JAVAC_WORKS()
-  AC_PROVIDE([$0])
-  AC_SUBST(JAVAC)
-  AC_SUBST(JAVACFLAGS)
-])
-
-dnl Check for jar archivers.
-AC_DEFUN([AP_PROG_JAR],[
-  if test "$SABLEVM" != "NONE"
-  then
-    AC_PATH_PROG(JARSABLE,jar-sablevm,NONE,$JAVA_HOME/bin)
-  else
-    JARSABLE="NONE"
-  fi
-  if test "$JARSABLE" = "NONE"
-  then
-    XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
-    AC_PATH_PROG(JAR,jar,NONE,$XPATH)
-  else
-    AC_PATH_PROG(JAR,jar-sablevm,NONE,$JAVA_HOME/bin)
-  fi
-  if test "$JAR" = "NONE"
-  then
-    AC_MSG_ERROR([jar not found])
-  fi
-  AC_PROVIDE([$0])
-  AC_SUBST(JAR)
-])
-
-AC_DEFUN([AP_JAVA],[
-  AC_ARG_WITH(java,[  --with-java=DIR         Specify the location of your JDK installation],[
-    AC_MSG_CHECKING([JAVA_HOME])
-    if test -d "$withval"
-    then
-      JAVA_HOME="$withval"
-      AC_MSG_RESULT([$JAVA_HOME])
-    else
-      AC_MSG_RESULT([failed])
-      AC_MSG_ERROR([$withval is not a directory])
-    fi
-    AC_SUBST(JAVA_HOME)
-  ])
-  if test x"$JAVA_HOME" = x
-  then
-    AC_MSG_ERROR([Java Home not defined. Rerun with --with-java=[...] parameter])
-  fi
-])
-
-dnl check if the JVM in JAVA_HOME is sableVM
-dnl $JAVA_HOME/bin/sablevm and /opt/java/lib/sablevm/bin are tested.
-AC_DEFUN([AP_SABLEVM],[
-  if test x"$JAVA_HOME" != x
-  then
-    AC_PATH_PROG(SABLEVM,sablevm,NONE,$JAVA_HOME/bin)
-    if test "$SABLEVM" = "NONE"
-    then
-      dnl java may be SableVM.
-      if $JAVA_HOME/bin/java -version 2> /dev/null | grep SableVM > /dev/null
-      then
-        SABLEVM=$JAVA_HOME/bin/java
-      fi
-    fi
-    if test "$SABLEVM" != "NONE"
-    then
-      AC_MSG_RESULT([Using sableVM: $SABLEVM])
-      CFLAGS="$CFLAGS -DHAVE_SABLEVM"
-    fi
-  fi
-])
-
-dnl check if the JVM in JAVA_HOME is kaffe
-dnl $JAVA_HOME/bin/kaffe is tested.
-AC_DEFUN([AP_KAFFE],[
-  if test x"$JAVA_HOME" != x
-  then
-    AC_PATH_PROG(KAFFEVM,kaffe,NONE,$JAVA_HOME/bin)
-    if test "$KAFFEVM" != "NONE"
-    then
-      AC_MSG_RESULT([Using kaffe: $KAFFEVM])
-      CFLAGS="$CFLAGS -DHAVE_KAFFEVM"
-      LDFLAGS="$LDFLAGS -Wl,-rpath $JAVA_HOME/jre/lib/$HOST_CPU -L $JAVA_HOME/jre/lib/$HOST_CPU -lkaffevm"
-    fi
-  fi
-])
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4 b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4
deleted file mode 100644
index 0c8b262dcbb..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4
+++ /dev/null
@@ -1,168 +0,0 @@
-dnl
-dnl Licensed to the Apache Software Foundation (ASF) under one or more
-dnl contributor license agreements.  See the NOTICE file distributed with
-dnl this work for additional information regarding copyright ownership.
-dnl The ASF licenses this file to You under the Apache License, Version 2.0
-dnl (the "License"); you may not use this file except in compliance with
-dnl the License.  You may obtain a copy of the License at
-dnl
-dnl     http://www.apache.org/licenses/LICENSE-2.0
-dnl
-dnl Unless required by applicable law or agreed to in writing, software
-dnl distributed under the License is distributed on an "AS IS" BASIS,
-dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-dnl See the License for the specific language governing permissions and
-dnl limitations under the License.
-dnl
-
-dnl -------------------------------------------------------------------------
-dnl Author  Pier Fumagalli
-dnl Version $Id$
-dnl -------------------------------------------------------------------------
-
-AC_DEFUN([AP_SUPPORTED_HOST],[
-  AC_MSG_CHECKING([C flags dependant on host system type])
-
-  case $host_os in
-  darwin*)
-    CFLAGS="$CFLAGS -DOS_DARWIN -DDSO_DYLD"
-    supported_os="darwin"
-    ;;
-  solaris*)
-    CFLAGS="$CFLAGS -DOS_SOLARIS -DDSO_DLFCN"
-    supported_os="solaris"
-    LIBS="$LIBS -ldl -lthread"
-    ;;
-  linux*)
-    CFLAGS="$CFLAGS -DOS_LINUX -DDSO_DLFCN"
-    supported_os="linux"
-    LIBS="$LIBS -ldl -lpthread"
-    ;;
-  cygwin)
-    CFLAGS="$CFLAGS -DOS_CYGWIN -DDSO_DLFCN -DNO_SETSID"
-    supported_os="win32"
-    ;;
-  sysv)
-    CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN"
-    LIBS="$LIBS -ldl"
-    ;;
-  sysv4)
-    CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN -Kthread"
-    LDFLAGS="-Kthread $LDFLAGS"
-    LIBS="$LIBS -ldl"
-    ;;
-  freebsd*)
-    CFLAGS="$CFLAGS -DOS_FREEBSD -DDSO_DLFCN -D_THREAD_SAFE -pthread"
-    LDFLAGS="-pthread $LDFLAGS"
-    supported_os="freebsd"
-    ;;
-  osf5*)
-    CFLAGS="$CFLAGS -pthread -DOS_TRU64 -DDSO_DLFCN -D_XOPEN_SOURCE_EXTENDED"
-    LDFLAGS="$LDFLAGS -pthread"
-    ;;
-  hpux11*)
-    CFLAGS="$CFLAGS -pthread -DOS_HPUX -DDSO_DLFCN"
-    LDFLAGS="$LDFLAGS -pthread"
-    LIBS="$LIBS -lpthread"
-    ;;
-  *)
-    AC_MSG_RESULT([failed])
-    AC_MSG_ERROR([Unsupported operating system "$host_os"]);;
-  esac
-
-  case $host_cpu in
-  powerpc*)
-    CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
-    HOST_CPU=$host_cpu;;
-  sparc*)
-    CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
-    HOST_CPU=$host_cpu;;
-  i?86)
-    CFLAGS="$CFLAGS -DCPU=\\\"i386\\\""
-    HOST_CPU=i386;;
-  x86_64)
-    CFLAGS="$CFLAGS -DCPU=\\\"amd64\\\""
-    HOST_CPU=amd64;;
-  bs2000)
-    CFLAGS="$CFLAGS -DCPU=\\\"osd\\\" -DCHARSET_EBCDIC -DOSD_POSIX"
-    supported_os="osd"
-    LDFLAGS="-Kno_link_stdlibs -B llm4"
-    LIBS="$LIBS -lBLSLIB"
-    LDCMD="/opt/C/bin/cc"
-    HOST_CPU=osd;;
-  mips)
-    CFLAGS="$CFLAGS -DCPU=\\\"mips\\\""
-    supported_os="mips"
-    HOST_CPU=mips;;
-  alpha*)
-    CFLAGS="$CFLAGS -DCPU=\\\"alpha\\\""
-    supported_os="alpha"
-    HOST_CPU=alpha;;
-  hppa2.0w)
-    CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0W\\\""
-    supported_os="hp-ux"
-    HOST_CPU=PA_RISC2.0W;;
-  hppa2.0)
-    CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0\\\""
-    supported_os="hp-ux"
-    HOST_CPU=PA_RISC2.0;;
-  mipsel)
-    CFLAGS="$CFLAGS -DCPU=\\\"mipsel\\\""
-    supported_os="mipsel"
-    HOST_CPU=mipsel;;
-  ia64)
-    CFLAGS="$CFLAGS -DCPU=\\\"ia64\\\""
-    supported_os="ia64"
-    HOST_CPU=ia64;;
-  s390)
-    CFLAGS="$CFLAGS -DCPU=\\\"s390\\\""
-    supported_os="s390"
-    HOST_CPU=s390;;
-  arm*)
-    CFLAGS="$CFLAGS -DCPU=\\\"arm\\\""
-    supported_os="arm"
-    HOST_CPU=arm;;
-  *)
-    AC_MSG_RESULT([failed])
-    AC_MSG_ERROR([Unsupported CPU architecture "$host_cpu"]);;
-  esac
-
-  AC_MSG_RESULT([ok])
-  AC_SUBST(CFLAGS)
-  AC_SUBST(LDFLAGS)
-])
-
-AC_DEFUN([AP_JVM_LIBDIR],[
-  AC_MSG_CHECKING([where on earth this jvm library is..])
-  javabasedir=$JAVA_HOME
-  case $host_os in
-  cygwin* | mingw* | pw23* )
-    lib_jvm_dir=`find $javabasedir -follow \( \
-      \( -name client -type d -prune \) -o \
-      \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
-    ;;
-  aix*)
-    lib_jvm_dir=`find $javabasedir \( \
-      \( -name client -type d -prune \) -o \
-      \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
-    if test -z "$lib_jvm_dir"; then
-      lib_jvm_dir=`find $javabasedir \( \
-        \( -name client -type d -prune \) -o \
-        \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
-    fi
-    ;;
-  *)
-    lib_jvm_dir=`find $javabasedir -follow \( \
-      \( -name client -type d -prune \) -o \
-      \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
-    if test -z "$lib_jvm_dir"; then
-      lib_jvm_dir=`find $javabasedir -follow \( \
-        \( -name client -type d -prune \) -o \
-        \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
-    fi
-    ;;
-  esac
-  LIB_JVM_DIR=$lib_jvm_dir
-  AC_MSG_RESULT([ohh u there ... $LIB_JVM_DIR])
-  AC_SUBST(LIB_JVM_DIR)
-])
diff --git a/hadoop-hdfs-project/pom.xml b/hadoop-hdfs-project/pom.xml
index 0e2684b1ea3..27161004a36 100644
--- a/hadoop-hdfs-project/pom.xml
+++ b/hadoop-hdfs-project/pom.xml
@@ -34,7 +34,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     hadoop-hdfs
     hadoop-hdfs-httpfs
     hadoop-hdfs/src/contrib/bkjournal
-    hadoop-hdfs/src/contrib/fuse-dfs
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index fb3c97b521c..f865b2dc595 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -47,47 +47,37 @@
-        org.codehaus.mojo
-        make-maven-plugin
+        org.apache.maven.plugins
+        maven-antrun-plugin
-            compile
+            make
             compile
-
-            autoreconf
-            configure
-            make-install
-
+            run
+
+
+
+
+
+
+
+
+
+
+
-            test
+            native_tests
             test
-
-            test
-
+
+
+
+
+
+
-
-
-            ${project.build.directory}/native/container-executor
-
-            -i
-
-
-
-
-
-            CFLAGS
-            -DHADOOP_CONF_DIR=${container-executor.conf.dir} ${container-executor.additional_cflags}
-
-
-            ${project.build.directory}/native/container-executor
-            /usr/local
-
-
-            ${project.build.directory}/native/target
-
-
@@ -172,14 +162,6 @@
                 run
-
-
-
-
-
-
-
-
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
new file mode 100644
index 00000000000..ace151a68bf
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
@@ -0,0 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
+
+set(CMAKE_BUILD_TYPE Release)
+
+if (JVM_ARCH_DATA_MODEL EQUAL 32)
+  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
+  set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
+  if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+    set(CMAKE_SYSTEM_PROCESSOR "i686")
+  endif ()
+endif (JVM_ARCH_DATA_MODEL EQUAL 32)
+
+function(output_directory TGT DIR)
+  SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+    RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+  SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+    ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+  SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+    LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+endfunction(output_directory TGT DIR)
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2 -D_GNU_SOURCE")
+# note: can't enable -D_LARGEFILE: see MAPREDUCE-4258
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT")
+
+include_directories(
+  ${CMAKE_CURRENT_SOURCE_DIR}
+  ${CMAKE_BINARY_DIR}
+  main/native/container-executor
+  main/native/container-executor/impl
+)
+CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
+
+add_library(container
+  main/native/container-executor/impl/configuration.c
+  main/native/container-executor/impl/container-executor.c
+)
+
+add_executable(container-executor
+  main/native/container-executor/impl/main.c
+)
+target_link_libraries(container-executor
+  container
+)
+output_directory(container-executor target/usr/local/bin)
+
+add_executable(test-container-executor
+  main/native/container-executor/test/test-container-executor.c
+)
+target_link_libraries(test-container-executor
+  container
+)
+output_directory(test-container-executor target/usr/local/bin)
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
new file mode 100644
index 00000000000..1fff36131f6
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
@@ -0,0 +1,6 @@
+#ifndef CONFIG_H
+#define CONFIG_H
+
+#cmakedefine HADOOP_CONF_DIR "@HADOOP_CONF_DIR@"
+
+#endif
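config.h.cmake is the CMake analogue of an autoheader template: the CONFIGURE_FILE() call in the CMakeLists.txt above rewrites each #cmakedefine line according to whether the named CMake variable is set. A small sketch of the rendering (the path is an assumed example value):

```cmake
# Render the template above into the build tree.
set(HADOOP_CONF_DIR "../etc/hadoop")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/config.h.cmake
               ${CMAKE_BINARY_DIR}/config.h)
# Because HADOOP_CONF_DIR is set, the generated config.h contains
#   #define HADOOP_CONF_DIR "../etc/hadoop"
# Had it been unset, #cmakedefine would instead emit
#   /* #undef HADOOP_CONF_DIR */
# so consuming C code could still guard on it with #ifdef.
```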
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# autom4te configuration for hadoop utils library
-#
-
-begin-language: "Autoheader-preselections"
-args: --no-cache
-end-language: "Autoheader-preselections"
-
-begin-language: "Automake-preselections"
-args: --no-cache
-end-language: "Automake-preselections"
-
-begin-language: "Autoreconf-preselections"
-args: --no-cache
-end-language: "Autoreconf-preselections"
-
-begin-language: "Autoconf-without-aclocal-m4"
-args: --no-cache
-end-language: "Autoconf-without-aclocal-m4"
-
-begin-language: "Autoconf"
-args: --no-cache
-end-language: "Autoconf"
-
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
deleted file mode 100644
index 9ce06a81ea4..00000000000
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
+++ /dev/null
@@ -1 +0,0 @@
-# dummy
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
deleted file mode 100644
index 4938bb2f53a..00000000000
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-AM_CFLAGS=-I$(srcdir)/impl -Wall -g -Werror
-
-# Define the programs that need to be built
-bin_PROGRAMS = container-executor
-check_PROGRAMS = test-container-executor
-
-TESTS = test-container-executor
-
-# Define the sources for the common files
-common_SOURCES = impl/configuration.c impl/container-executor.c
-
-# Define the sources for the real executable
-container_executor_SOURCES = $(common_SOURCES) impl/main.c
-
-# Define the sources for the test executable
-test_container_executor_SOURCES = $(common_SOURCES) test/test-container-executor.c
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac
deleted file mode 100644
index db8af88cf12..00000000000
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac
+++ /dev/null
@@ -1,54 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -*- Autoconf -*-
-# Process this file with autoconf to produce a configure script.
-
-AC_PREREQ(2.59)
-AC_INIT(linux-container-executor, 1.0.0, mapreduce-dev@hadoop.apache.org)
-AC_GNU_SOURCE
-#AC_SYS_LARGEFILE
-
-AM_INIT_AUTOMAKE([subdir-objects foreign no-dist])
-
-AC_CONFIG_SRCDIR([impl/container-executor.c])
-AC_CONFIG_FILES([Makefile])
-
-AC_PREFIX_DEFAULT(`pwd`/../install)
-
-CHECK_INSTALL_CFLAG
-HADOOP_UTILS_SETUP
-
-# Checks for programs.
-AC_PROG_CC
-AM_PROG_CC_C_O
-
-# Checks for libraries.
-
-# Checks for header files.
-AC_LANG(C)
-AC_CHECK_HEADERS([unistd.h])
-
-# Checks for typedefs, structures, and compiler characteristics.
-AC_HEADER_STDBOOL
-AC_C_CONST
-AC_TYPE_OFF_T
-AC_TYPE_SIZE_T
-AC_FUNC_STRERROR_R
-
-# Checks for library functions.
-AC_CHECK_FUNCS([mkdir uname])
-AC_OUTPUT
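Nothing in the deleted configure.ac goes unreplaced: the compiler checks are covered by CMake's own compiler detection, and the feature probes have direct counterparts in CMake's check modules. A rough mapping (illustration only; the nodemanager CMakeLists.txt added by this patch needs none of these probes):

```cmake
# Rough CMake equivalents of the deleted autoconf probes.
include(CheckIncludeFiles)
include(CheckFunctionExists)
check_include_files("unistd.h" HAVE_UNISTD_H)   # was AC_CHECK_HEADERS([unistd.h])
check_function_exists(mkdir HAVE_MKDIR)         # was AC_CHECK_FUNCS([mkdir uname])
check_function_exists(uname HAVE_UNAME)
```

This is the same idiom the hadoop-common CMake build uses for its sync_file_range and posix_fadvise probes.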
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
index cd8caabe333..d6ce5aa7061 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
@@ -16,6 +16,7 @@
  * limitations under the License.
  */
 
+#include "config.h"
 #include "configuration.h"
 #include "container-executor.h"
@@ -29,8 +30,6 @@
 #include
 #include
 
-#define _STRINGIFY(X) #X
-#define STRINGIFY(X) _STRINGIFY(X)
 #define CONF_FILENAME "container-executor.cfg"
 
 // When building as part of a Maven build this value gets defined by using
@@ -101,7 +100,7 @@ int main(int argc, char **argv) {
   char *executable_file = get_executable();
 
-  char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) "/" CONF_FILENAME;
+  char *orig_conf_file = HADOOP_CONF_DIR "/" CONF_FILENAME;
   char *conf_file = resolve_config_path(orig_conf_file, argv[0]);
 
   char *local_dirs, *log_dirs;
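The STRINGIFY pair existed only because the old build delivered HADOOP_CONF_DIR as an unquoted -D token; once config.h supplies a quoted literal, plain C string-literal concatenation suffices. The contrast, with an assumed path (the add_definitions line reproduces the pre-patch style purely for comparison and is not part of this patch):

```cmake
# Pre-patch: the value arrives as a bare preprocessor token, so the C code
# had to stringify it before use -- hence the old _STRINGIFY/STRINGIFY pair.
add_definitions(-DHADOOP_CONF_DIR=../etc/hadoop)

# Post-patch: the generated config.h holds a quoted literal, so in C
#   HADOOP_CONF_DIR "/" CONF_FILENAME
# concatenates at compile time to "../etc/hadoop/container-executor.cfg"
# with no helper macros at all.
```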