HADOOP-8368. Use CMake rather than autotools to build native code (cmccabe via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1348957 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2012-06-11 18:34:40 +00:00
parent 56d2ef6f5e
commit 8a9e63e468
44 changed files with 511 additions and 1991 deletions


@@ -536,31 +536,10 @@
<snappy.prefix>/usr/local</snappy.prefix>
<snappy.lib>${snappy.prefix}/lib</snappy.lib>
<snappy.include>${snappy.prefix}/include</snappy.include>
<runas.home></runas.home>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native/javah"/>
<copy toDir="${project.build.directory}/native">
<fileset dir="${basedir}/src/main/native"/>
</copy>
<mkdir dir="${project.build.directory}/native/m4"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>native-maven-plugin</artifactId>
@@ -590,73 +569,27 @@
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>make-maven-plugin</artifactId>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<id>make</id>
<phase>compile</phase>
<goals>
<goal>autoreconf</goal>
<goal>configure</goal>
<goal>make-install</goal>
</goals>
<goals><goal>run</goal></goals>
<configuration>
<target>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
<env key="CFLAGS" value="-I${snappy.include}"/>
<env key="LDFLAGS" value="-L${snappy.lib}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
</target>
</configuration>
</execution>
</executions>
<configuration>
<!-- autoreconf settings -->
<workDir>${project.build.directory}/native</workDir>
<arguments>
<argument>-i</argument>
<argument>-f</argument>
</arguments>
<!-- configure settings -->
<configureEnvironment>
<property>
<name>OS_NAME</name>
<value>${os.name}</value>
</property>
<property>
<name>OS_ARCH</name>
<value>${os.arch}</value>
</property>
<property>
<name>JVM_DATA_MODEL</name>
<value>${sun.arch.data.model}</value>
</property>
</configureEnvironment>
<configureOptions>
<configureOption>CPPFLAGS=-I${snappy.include}</configureOption>
<configureOption>LDFLAGS=-L${snappy.lib}</configureOption>
</configureOptions>
<configureWorkDir>${project.build.directory}/native</configureWorkDir>
<prefix>/usr/local</prefix>
<!-- make settings -->
<installEnvironment>
<property>
<name>OS_NAME</name>
<value>${os.name}</value>
</property>
<property>
<name>OS_ARCH</name>
<value>${os.arch}</value>
</property>
<property>
<name>JVM_DATA_MODEL</name>
<value>${sun.arch.data.model}</value>
</property>
<property>
<name>HADOOP_NATIVE_SRCDIR</name>
<value>${project.build.directory}/native</value>
</property>
</installEnvironment>
<!-- configure & make settings -->
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</plugin>
</plugins>
</build>
@@ -700,7 +633,7 @@
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<id>kdc</id>
<phase>compile</phase>
<goals>
<goal>run</goal>


@@ -0,0 +1,126 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
# Default to release builds
set(CMAKE_BUILD_TYPE Release)
# If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
# This variable is set by maven.
if (JVM_ARCH_DATA_MODEL EQUAL 32)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
set(CMAKE_SYSTEM_PROCESSOR "i686")
endif ()
endif (JVM_ARCH_DATA_MODEL EQUAL 32)
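# (Illustrative: the maven-antrun hunk above passes this flag as
#  "-DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}", so a 32-bit JVM resolves
#  it to 32 and triggers the -m32 compile/link flags here.)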
# Compile a library with both shared and static variants
function(add_dual_library LIBNAME)
add_library(${LIBNAME} SHARED ${ARGN})
add_library(${LIBNAME}_static STATIC ${ARGN})
set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
endfunction(add_dual_library)
# Link both a static and a dynamic target against some libraries
function(target_link_dual_libraries LIBNAME)
target_link_libraries(${LIBNAME} ${ARGN})
target_link_libraries(${LIBNAME}_static ${ARGN})
endfunction(target_link_dual_libraries)
function(output_directory TGT DIR)
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction(output_directory TGT DIR)
function(dual_output_directory TGT DIR)
output_directory(${TGT} "${DIR}")
output_directory(${TGT}_static "${DIR}")
endfunction(dual_output_directory TGT DIR)
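# Example (hypothetical target name "foo"): the three helpers above combine
# to build libfoo.so and libfoo.a from one source list, link both variants
# against the same libraries, and emit both into lib/ under the build tree:
#   add_dual_library(foo foo.c)
#   target_link_dual_libraries(foo z)
#   dual_output_directory(foo lib)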
if (NOT GENERATED_JAVAH)
# Must identify where the generated headers have been placed
    MESSAGE(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH")
endif (NOT GENERATED_JAVAH)
find_package(JNI REQUIRED)
find_package(ZLIB REQUIRED)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
set(D main/native/src/org/apache/hadoop)
GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
INCLUDE(CheckFunctionExists)
INCLUDE(CheckCSourceCompiles)
CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
if (SNAPPY_LIBRARY)
GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
set(SNAPPY_SOURCE_FILES
"${D}/io/compress/snappy/SnappyCompressor.c"
"${D}/io/compress/snappy/SnappyDecompressor.c")
else (SNAPPY_LIBRARY)
set(SNAPPY_INCLUDE_DIR "")
set(SNAPPY_SOURCE_FILES "")
endif (SNAPPY_LIBRARY)
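# Note: HADOOP_SNAPPY_LIBRARY above feeds the #cmakedefine in config.h.cmake,
# and the snappy JNI sources guard themselves with
# "#if defined HADOOP_SNAPPY_LIBRARY", so a build without snappy simply
# leaves that code path out of libhadoop.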
include_directories(
${GENERATED_JAVAH}
main/native/src
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/src
${CMAKE_BINARY_DIR}
${JNI_INCLUDE_DIRS}
${ZLIB_INCLUDE_DIRS}
${SNAPPY_INCLUDE_DIR}
)
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
add_dual_library(hadoop
${D}/io/compress/lz4/Lz4Compressor.c
${D}/io/compress/lz4/Lz4Decompressor.c
${D}/io/compress/lz4/lz4.c
${SNAPPY_SOURCE_FILES}
${D}/io/compress/zlib/ZlibCompressor.c
${D}/io/compress/zlib/ZlibDecompressor.c
${D}/io/nativeio/NativeIO.c
${D}/io/nativeio/errno_enum.c
${D}/io/nativeio/file_descriptor.c
${D}/security/JniBasedUnixGroupsMapping.c
${D}/security/JniBasedUnixGroupsNetgroupMapping.c
${D}/security/getGroup.c
${D}/util/NativeCrc32.c
${D}/util/bulk_crc32.c
)
target_link_dual_libraries(hadoop
dl
${JAVA_JVM_LIBRARY}
)
SET(LIBHADOOP_VERSION "1.0.0")
SET_TARGET_PROPERTIES(hadoop PROPERTIES
SOVERSION ${LIBHADOOP_VERSION})
dual_output_directory(hadoop target/usr/local/lib)


@@ -0,0 +1,10 @@
#ifndef CONFIG_H
#define CONFIG_H
#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@"
#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
#cmakedefine HAVE_SYNC_FILE_RANGE
#cmakedefine HAVE_POSIX_FADVISE
#endif
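// Illustrative note: CONFIGURE_FILE() in CMakeLists.txt expands this
// template. When a variable is set in CMake, its "#cmakedefine" line becomes
// a real "#define" with @VAR@ substituted; when it is unset, the line is
// emitted as a commented-out "#undef", so optional features such as snappy
// simply stay undefined.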


@@ -1,42 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# autom4te configuration for hadoop-native library
#
begin-language: "Autoheader-preselections"
args: --no-cache
end-language: "Autoheader-preselections"
begin-language: "Automake-preselections"
args: --no-cache
end-language: "Automake-preselections"
begin-language: "Autoreconf-preselections"
args: --no-cache
end-language: "Autoreconf-preselections"
begin-language: "Autoconf-without-aclocal-m4"
args: --no-cache
end-language: "Autoconf-without-aclocal-m4"
begin-language: "Autoconf"
args: --no-cache
end-language: "Autoconf"


@@ -1,66 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Notes:
# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}.
# 2. This makefile depends on the following environment variables to function correctly:
# * HADOOP_NATIVE_SRCDIR
# * JAVA_HOME
# * JVM_DATA_MODEL
# * OS_NAME
# * OS_ARCH
# All these are setup by build.xml.
#
# Export $(PLATFORM) to prevent proliferation of sub-shells
export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
ACLOCAL_AMFLAGS = -I m4
AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
-I$(HADOOP_NATIVE_SRCDIR)/javah
AM_LDFLAGS = @JNI_LDFLAGS@
AM_CFLAGS = -g -Wall -fPIC -O2
if SPECIFY_DATA_MODEL
AM_LDFLAGS += -m$(JVM_DATA_MODEL)
AM_CFLAGS += -m$(JVM_DATA_MODEL)
endif
lib_LTLIBRARIES = libhadoop.la
libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c \
src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c \
src/org/apache/hadoop/io/compress/lz4/lz4.c \
src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c \
src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c \
src/org/apache/hadoop/security/getGroup.c \
src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
src/org/apache/hadoop/io/nativeio/file_descriptor.c \
src/org/apache/hadoop/io/nativeio/errno_enum.c \
src/org/apache/hadoop/io/nativeio/NativeIO.c \
src/org/apache/hadoop/util/NativeCrc32.c \
src/org/apache/hadoop/util/bulk_crc32.c
libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
libhadoop_la_LIBADD = -ldl -ljvm
#
#vim: sw=4: ts=4: noet
#


@@ -1,28 +0,0 @@
# AC_COMPUTE_NEEDED_DSO(LIBRARY, TEST_PROGRAM, PREPROC_SYMBOL)
# --------------------------------------------------
# Compute the 'actual' dynamic-library used
# for LIBRARY and set it to PREPROC_SYMBOL
AC_DEFUN([AC_COMPUTE_NEEDED_DSO],
[
AC_CACHE_CHECK([Checking for the 'actual' dynamic-library for '-l$1'], ac_cv_libname_$1,
[
echo '$2' > conftest.c
if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -l$1 2>&1`"; then
dnl Try objdump and ldd in that order to get the dynamic library
if test ! -z "`which objdump | grep -v 'no objdump'`"; then
ac_cv_libname_$1="`objdump -p conftest | grep NEEDED | grep $1 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
ac_cv_libname_$1="`ldd conftest | grep $1 | sed 's/^[[[^A-Za-z0-9]]]*\([[[A-Za-z0-9\.]]]*\)[[[^A-Za-z0-9]]]*=>.*$/\"\1\"/'`"
elif test ! -z "`which otool | grep -v 'no otool'`"; then
ac_cv_libname_$1=\"`otool -L conftest | grep $1 | sed -e 's/^[ ]*//' -e 's/ .*//' -e 's/.*\/\(.*\)$/\1/'`\";
else
AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' or 'otool' to compute the dynamic library for '-l$1')
fi
else
ac_cv_libname_$1=libnotfound.so
fi
rm -f conftest*
]
)
AC_DEFINE_UNQUOTED($3, ${ac_cv_libname_$1}, [The 'actual' dynamic-library for '-l$1'])
])# AC_COMPUTE_NEEDED_DSO


@@ -1,130 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# configure.ac for hadoop native code.
#
# Notes:
# 1. This configure.ac depends on the following environment variables to function correctly:
# * HADOOP_NATIVE_SRCDIR
# * JAVA_HOME
# * JVM_DATA_MODEL
# * OS_NAME
# * OS_ARCH
# All these are setup by build.xml.
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
#
AC_PREREQ(2.59)
AC_INIT(src/org_apache_hadoop.h)
AC_CONFIG_SRCDIR([src/org_apache_hadoop.h])
AC_CONFIG_AUX_DIR([config])
AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_HEADER([config.h])
AC_SYS_LARGEFILE
AC_GNU_SOURCE
AM_INIT_AUTOMAKE(hadoop,1.0.0)
# Checks for programs.
AC_PROG_CC
AC_PROG_LIBTOOL
# Checks for libraries.
dnl Check for '-ldl'
AC_CHECK_LIB([dl], [dlopen])
dnl Check for '-ljvm'
JNI_LDFLAGS=""
if test $JAVA_HOME != ""
then
JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1`
JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`"
fi
LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
AC_CHECK_LIB([jvm], [JNI_GetCreatedJavaVMs])
AC_SUBST([JNI_LDFLAGS])
# Checks for header files.
dnl Check for Ansi C headers
AC_HEADER_STDC
dnl Check for other standard C headers
AC_CHECK_HEADERS([stdio.h stddef.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
dnl Check for JNI headers
JNI_CPPFLAGS=""
if test $JAVA_HOME != ""
then
for dir in `find $JAVA_HOME/include -follow -type d`
do
JNI_CPPFLAGS="$JNI_CPPFLAGS -I$dir"
done
fi
cppflags_bak=$CPPFLAGS
CPPFLAGS="$CPPFLAGS $JNI_CPPFLAGS"
AC_CHECK_HEADERS([jni.h], [], AC_MSG_ERROR([Native java headers not found. Is \$JAVA_HOME set correctly?]))
CPPFLAGS=$cppflags_bak
AC_SUBST([JNI_CPPFLAGS])
dnl Check for zlib headers
AC_CHECK_HEADERS([zlib.h zconf.h],
AC_COMPUTE_NEEDED_DSO(z,
[#include "zlib.h"
int main(int argc, char **argv){zlibVersion();return 0;}],
HADOOP_ZLIB_LIBRARY),
AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
dnl Check for snappy headers
AC_CHECK_HEADERS([snappy-c.h],
AC_COMPUTE_NEEDED_DSO(snappy,
[#include "snappy-c.h"
int main(int argc, char **argv){snappy_compress(0,0,0,0);return 0;}],
HADOOP_SNAPPY_LIBRARY),
AC_MSG_WARN(Snappy headers were not found... building without snappy.))
dnl Check for headers needed by the native Group resolution implementation
AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
dnl check for posix_fadvise
AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(posix_fadvise)])
dnl check for sync_file_range
AC_CHECK_HEADERS(fcntl.h, [AC_CHECK_FUNCS(sync_file_range)])
# Checks for typedefs, structures, and compiler characteristics.
AC_C_CONST
# Checks for library functions.
AC_CHECK_FUNCS([memset])
# Check for nonstandard STRERROR_R
AC_FUNC_STRERROR_R
AM_CONDITIONAL([SPECIFY_DATA_MODEL], [case $host_cpu in arm*) false;; *) true;; esac])
AC_CONFIG_FILES([Makefile])
AC_OUTPUT
#
#vim: sw=2: ts=2: noet
#


@@ -1,47 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Makefile template for building libhadoop.so
#
#
# Notes:
# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/lib
# 2. This makefile depends on the following environment variables to function correctly:
# * HADOOP_NATIVE_SRCDIR
# * JAVA_HOME
# * OS_ARCH
# All these are setup by build.xml and/or the top-level makefile.
#
# Add .lo files in $(SUBDIRS) to construct libhadoop.so
HADOOP_OBJS = $(foreach path,$(addprefix ../,$(SUBDIRS)),$(wildcard $(path)/*.lo))
AM_LDFLAGS = @JNI_LDFLAGS@
if SPECIFY_DATA_MODEL
AM_LDFLAGS += -m$(JVM_DATA_MODEL)
endif
lib_LTLIBRARIES = libhadoop.la
libhadoop_la_SOURCES =
libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
#
#vim: sw=4: ts=4: noet
#


@@ -16,10 +16,7 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#include "config.h"
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"


@@ -16,10 +16,7 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#include "config.h"
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"


@@ -16,36 +16,12 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HADOOP_SNAPPY_LIBRARY
#if defined HAVE_STDIO_H
#include <stdio.h>
#else
#error 'stdio.h not found'
#endif
#if defined HAVE_STDLIB_H
#include <stdlib.h>
#else
#error 'stdlib.h not found'
#endif
#if defined HAVE_STRING_H
#include <string.h>
#else
#error 'string.h not found'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error 'dlfcn.h not found'
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "config.h"
#include "org_apache_hadoop_io_compress_snappy.h"
#include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
@@ -123,5 +99,3 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
return (jint)compressed_direct_buf_len;
}
#endif //define HADOOP_SNAPPY_LIBRARY


@@ -16,36 +16,12 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HADOOP_SNAPPY_LIBRARY
#if defined HAVE_STDIO_H
#include <stdio.h>
#else
#error 'stdio.h not found'
#endif
#if defined HAVE_STDLIB_H
#include <stdlib.h>
#else
#error 'stdlib.h not found'
#endif
#if defined HAVE_STRING_H
#include <string.h>
#else
#error 'string.h not found'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error 'dlfcn.h not found'
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "config.h"
#include "org_apache_hadoop_io_compress_snappy.h"
#include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
@@ -127,5 +103,3 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres
return (jint)uncompressed_direct_buf_len;
}
#endif //define HADOOP_SNAPPY_LIBRARY


@@ -17,42 +17,13 @@
*/
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
#ifndef ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
#define ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HADOOP_SNAPPY_LIBRARY
#if defined HAVE_STDDEF_H
#include <stddef.h>
#else
#error 'stddef.h not found'
#endif
#if defined HAVE_SNAPPY_C_H
#include <snappy-c.h>
#else
#error 'Please install snappy-development packages for your platform.'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error "dlfcn.h not found"
#endif
#if defined HAVE_JNI_H
#include <jni.h>
#else
#error 'jni.h not found'
#endif
#include "org_apache_hadoop.h"
#endif //define HADOOP_SNAPPY_LIBRARY
#include <dlfcn.h>
#include <jni.h>
#include <snappy-c.h>
#include <stddef.h>
#endif //ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H


@@ -1,53 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Makefile template for building native 'zlib' for hadoop.
#
#
# Notes:
# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/$(subdir) .
# 2. This makefile depends on the following environment variables to function correctly:
# * HADOOP_NATIVE_SRCDIR
# * JAVA_HOME
# * JVM_DATA_MODEL
# * OS_ARCH
# * PLATFORM
# All these are setup by build.xml and/or the top-level makefile.
# 3. The creation of requisite jni headers/stubs are also done by build.xml and they are
# assumed to be in $(HADOOP_PREFIX)/build/native/src/org/apache/hadoop/io/compress/zlib.
#
# The 'vpath directive' to locate the actual source files
vpath %.c $(HADOOP_NATIVE_SRCDIR)/$(subdir)
AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src
AM_LDFLAGS = @JNI_LDFLAGS@
AM_CFLAGS = -g -Wall -fPIC -O2
if SPECIFY_DATA_MODEL
AM_CFLAGS += -m$(JVM_DATA_MODEL)
endif
noinst_LTLIBRARIES = libnativezlib.la
libnativezlib_la_SOURCES = ZlibCompressor.c ZlibDecompressor.c
libnativezlib_la_LIBADD = -ldl -ljvm
#
#vim: sw=4: ts=4: noet
#


@@ -16,34 +16,12 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HAVE_STDIO_H
#include <stdio.h>
#else
#error 'stdio.h not found'
#endif
#if defined HAVE_STDLIB_H
#include <stdlib.h>
#else
#error 'stdlib.h not found'
#endif
#if defined HAVE_STRING_H
#include <string.h>
#else
#error 'string.h not found'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error 'dlfcn.h not found'
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "config.h"
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"


@@ -16,34 +16,12 @@
* limitations under the License.
*/
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HAVE_STDIO_H
#include <stdio.h>
#else
#error 'stdio.h not found'
#endif
#if defined HAVE_STDLIB_H
#include <stdlib.h>
#else
#error 'stdlib.h not found'
#endif
#if defined HAVE_STRING_H
#include <string.h>
#else
#error 'string.h not found'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error 'dlfcn.h not found'
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "config.h"
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"


@@ -19,40 +19,13 @@
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HAVE_STDDEF_H
#include <stddef.h>
#else
#error 'stddef.h not found'
#endif
#if defined HAVE_ZLIB_H
#include <zlib.h>
#else
#error 'Please install zlib-development packages for your platform.'
#endif
#if defined HAVE_ZCONF_H
#include <zconf.h>
#else
#error 'Please install zlib-development packages for your platform.'
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error "dlfcn.h not found"
#endif
#if defined HAVE_JNI_H
#include <jni.h>
#else
#error 'jni.h not found'
#endif
#include <stddef.h>
#include <zconf.h>
#include <zlib.h>
#include "config.h"
#include "org_apache_hadoop.h"
/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */


@@ -16,9 +16,6 @@
* limitations under the License.
*/
// get the autoconf settings
#include "config.h"
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
@@ -32,6 +29,7 @@
#include <sys/syscall.h>
#include <unistd.h>
#include "config.h"
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_nativeio_NativeIO.h"
#include "file_descriptor.h"


@@ -16,9 +16,6 @@
* limitations under the License.
*/
// get the autoconf settings
#include "config.h"
#include <arpa/inet.h>
#include <assert.h>
#include <stdlib.h>
@@ -26,6 +23,7 @@
#include <string.h>
#include <unistd.h>
#include "config.h"
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_util_NativeCrc32.h"
#include "gcc_optimizations.h"


@@ -24,21 +24,10 @@
#if !defined ORG_APACHE_HADOOP_H
#define ORG_APACHE_HADOOP_H
#if defined HAVE_CONFIG_H
#include <config.h>
#endif
#if defined HAVE_DLFCN_H
#include <dlfcn.h>
#else
#error "dlfcn.h not found"
#endif
#if defined HAVE_JNI_H
#include <jni.h>
#else
#error 'jni.h not found'
#endif
#include "config.h"
/* A helper macro to 'throw' a java exception. */
#define THROW(env, exception_name, message) \


@@ -415,76 +415,22 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<id>make</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<goals><goal>run</goal></goals>
<configuration>
<target>
<copy toDir="${project.build.directory}/native">
<fileset dir="${basedir}/src/main/native"/>
</copy>
<mkdir dir="${project.build.directory}/native/m4"/>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native"
failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>make-maven-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<phase>compile</phase>
<goals>
<goal>autoreconf</goal>
<goal>configure</goal>
<goal>make-install</goal>
</goals>
<configuration>
<!-- autoreconf settings -->
<workDir>${project.build.directory}/native</workDir>
<arguments>
<argument>-i</argument>
<argument>-f</argument>
</arguments>
<!-- configure settings -->
<configureEnvironment>
<property>
<name>ac_cv_func_malloc_0_nonnull</name>
<value>yes</value>
</property>
<property>
<name>JVM_ARCH</name>
<value>${sun.arch.data.model}</value>
</property>
</configureEnvironment>
<configureOptions>
</configureOptions>
<configureWorkDir>${project.build.directory}/native</configureWorkDir>
<prefix>/usr/local</prefix>
<!-- make settings -->
<installEnvironment>
<property>
<name>ac_cv_func_malloc_0_nonnull</name>
<value>yes</value>
</property>
<property>
<name>JVM_ARCH</name>
<value>${sun.arch.data.model}</value>
</property>
</installEnvironment>
<!-- configure & make settings -->
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</execution>
<!-- TODO wire here native testcases
<execution>
<id>test</id>
@@ -541,7 +487,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<id>kdc</id>
<phase>compile</phase>
<goals>
<goal>run</goal>


@@ -0,0 +1,126 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
# Default to release builds
set(CMAKE_BUILD_TYPE Release)
# If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
# This variable is set by maven.
if (JVM_ARCH_DATA_MODEL EQUAL 32)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
set(CMAKE_SYSTEM_PROCESSOR "i686")
endif ()
endif (JVM_ARCH_DATA_MODEL EQUAL 32)
# Compile a library with both shared and static variants
function(add_dual_library LIBNAME)
add_library(${LIBNAME} SHARED ${ARGN})
add_library(${LIBNAME}_static STATIC ${ARGN})
set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
endfunction(add_dual_library)
# Link both a static and a dynamic target against some libraries
function(target_link_dual_libraries LIBNAME)
target_link_libraries(${LIBNAME} ${ARGN})
target_link_libraries(${LIBNAME}_static ${ARGN})
endfunction(target_link_dual_libraries)
function(output_directory TGT DIR)
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction(output_directory TGT DIR)
function(dual_output_directory TGT DIR)
output_directory(${TGT} "${DIR}")
output_directory(${TGT}_static "${DIR}")
endfunction(dual_output_directory TGT DIR)
# Flatten a list into a string.
function(FLATTEN_LIST INPUT SEPARATOR OUTPUT)
string (REPLACE ";" "${SEPARATOR}" _TMPS "${INPUT}")
set (${OUTPUT} "${_TMPS}" PARENT_SCOPE)
endfunction()
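# Example (illustrative): pkg-config results arrive as a semicolon-separated
# CMake list, and the FUSE module below joins them into one flags string:
#   FLATTEN_LIST("-D_FILE_OFFSET_BITS=64;-I/usr/include/fuse" " " _OUT)
# leaves _OUT set to "-D_FILE_OFFSET_BITS=64 -I/usr/include/fuse".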
find_package(JNI REQUIRED)
if (NOT GENERATED_JAVAH)
# Must identify where the generated headers have been placed
MESSAGE(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH")
endif (NOT GENERATED_JAVAH)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2 -D_GNU_SOURCE")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
include_directories(
${GENERATED_JAVAH}
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_BINARY_DIR}
${JNI_INCLUDE_DIRS}
main/native/
)
set(_FUSE_DFS_VERSION 0.1.0)
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
add_dual_library(hdfs
main/native/hdfs.c
main/native/hdfsJniHelper.c
)
target_link_dual_libraries(hdfs
${JAVA_JVM_LIBRARY}
)
dual_output_directory(hdfs target/usr/local/lib)
set(LIBHDFS_VERSION "0.0.0")
set_target_properties(hdfs PROPERTIES
SOVERSION ${LIBHDFS_VERSION})
add_executable(hdfs_test
main/native/hdfs_test.c
)
target_link_libraries(hdfs_test
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_test target/usr/local/bin)
add_executable(hdfs_read
main/native/hdfs_read.c
)
target_link_libraries(hdfs_read
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_read target/usr/local/bin)
add_executable(hdfs_write
main/native/hdfs_write.c
)
target_link_libraries(hdfs_write
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_write target/usr/local/bin)
add_subdirectory(contrib/fuse-dfs/src)


@@ -0,0 +1,6 @@
#ifndef CONFIG_H
#define CONFIG_H
#cmakedefine _FUSE_DFS_VERSION "@_FUSE_DFS_VERSION@"
#endif


@@ -1,27 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
@GLOBAL_HEADER_MK@
@PRODUCT_MK@
SUBDIRS = . src
clean:
rm -rf autom4te.cache config.guess config.log config.status config.sub configure depcomp src/.deps install-sh Makefile.in src/Makefile.in src/Makefile missing Makefile src/fuse_dfs.o src/fuse_dfs aclocal.m4
@GLOBAL_FOOTER_MK@


@@ -1,270 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
AC_DEFUN([FUSE_DFS_INITIALIZE],
[
AM_INIT_AUTOMAKE([ foreign 1.9.5 no-define ])
if test "x$1" = "xlocalinstall"; then
wdir=`pwd`
# To use $wdir undef quote.
#
##########
AC_PREFIX_DEFAULT([`pwd`/install])
echo
fi
AC_PROG_CC
AC_PROG_CXX
AC_PROG_RANLIB(RANLIB, ranlib)
AC_PATH_PROGS(BASH, bash)
AC_PATH_PROGS(PERL, perl)
AC_PATH_PROGS(PYTHON, python)
AC_PATH_PROGS(AR, ar)
AC_PATH_PROGS(ANT, ant)
PRODUCT_MK=""
])
AC_DEFUN([FUSE_DFS_WITH_EXTERNAL_PATH],
[
cdir=`pwd`
AC_MSG_CHECKING([Checking EXTERNAL_PATH set to])
AC_ARG_WITH([externalpath],
[ --with-externalpath=DIR User specified path to external fuse dfs components.],
[
if test "x${EXTERNAL_PATH}" != "x"; then
echo ""
echo "ERROR: You have already set EXTERNAL_PATH in your environment"
echo "Cannot override it using --with-externalpath. Unset EXTERNAL_PATH to use this option"
exit 1
fi
EXTERNAL_PATH=$withval
],
[
if test "x${EXTERNAL_PATH}" = "x"; then
EXTERNAL_PATH=$1
fi
]
)
if test "x${EXTERNAL_PATH}" = "x"; then
export EXTERNAL_PATH="$cdir/external"
GLOBAL_HEADER_MK="include ${EXTERNAL_PATH}/global_header.mk"
GLOBAL_FOOTER_MK="include ${EXTERNAL_PATH}/global_footer.mk"
else
export EXTERNAL_PATH
GLOBAL_HEADER_MK="include ${EXTERNAL_PATH}/global_header.mk"
GLOBAL_FOOTER_MK="include ${EXTERNAL_PATH}/global_footer.mk"
fi
AC_MSG_RESULT($EXTERNAL_PATH)
if test ! -d ${EXTERNAL_PATH}; then
echo ""
echo "ERROR: EXTERNAL_PATH set to an nonexistent directory ${EXTERNAL_PATH}"
exit 1
fi
AC_SUBST(EXTERNAL_PATH)
AC_SUBST(GLOBAL_HEADER_MK)
AC_SUBST(GLOBAL_FOOTER_MK)
])
# Set option to enable shared mode. Set DEBUG and OPT for use in Makefile.am.
AC_DEFUN([FUSE_DFS_ENABLE_DEFAULT_OPT_BUILD],
[
AC_MSG_CHECKING([whether to enable optimized build])
AC_ARG_ENABLE([opt],
[ --disable-opt Set up debug mode.],
[
ENABLED_OPT=$enableval
],
[
ENABLED_OPT="yes"
]
)
if test "$ENABLED_OPT" = "yes"
then
CFLAGS="-Wall -O3"
CXXFLAGS="-Wall -O3"
else
CFLAGS="-Wall -g"
CXXFLAGS="-Wall -g"
fi
AC_MSG_RESULT($ENABLED_OPT)
AM_CONDITIONAL([OPT], [test "$ENABLED_OPT" = yes])
AM_CONDITIONAL([DEBUG], [test "$ENABLED_OPT" = no])
])
# Set option to enable debug mode. Set DEBUG and OPT for use in Makefile.am.
AC_DEFUN([FUSE_DFS_ENABLE_DEFAULT_DEBUG_BUILD],
[
AC_MSG_CHECKING([whether to enable debug build])
AC_ARG_ENABLE([debug],
[ --disable-debug Set up opt mode.],
[
ENABLED_DEBUG=$enableval
],
[
ENABLED_DEBUG="yes"
]
)
if test "$ENABLED_DEBUG" = "yes"
then
CFLAGS="-Wall -g"
CXXFLAGS="-Wall -g"
else
CFLAGS="-Wall -O3"
CXXFLAGS="-Wall -O3"
fi
AC_MSG_RESULT($ENABLED_DEBUG)
AM_CONDITIONAL([DEBUG], [test "$ENABLED_DEBUG" = yes])
AM_CONDITIONAL([OPT], [test "$ENABLED_DEBUG" = no])
])
# Set option to enable static libs.
AC_DEFUN([FUSE_DFS_ENABLE_DEFAULT_STATIC],
[
SHARED=""
STATIC=""
AC_MSG_CHECKING([whether to enable static mode])
AC_ARG_ENABLE([static],
[ --disable-static Set up shared mode.],
[
ENABLED_STATIC=$enableval
],
[
ENABLED_STATIC="yes"
]
)
if test "$ENABLED_STATIC" = "yes"
then
LTYPE=".a"
else
LTYPE=".so"
SHARED_CXXFLAGS="-fPIC"
SHARED_CFLAGS="-fPIC"
SHARED_LDFLAGS="-shared -fPIC"
AC_SUBST(SHARED_CXXFLAGS)
AC_SUBST(SHARED_CFLAGS)
AC_SUBST(SHARED_LDFLAGS)
fi
AC_MSG_RESULT($ENABLED_STATIC)
AC_SUBST(LTYPE)
AM_CONDITIONAL([STATIC], [test "$ENABLED_STATIC" = yes])
AM_CONDITIONAL([SHARED], [test "$ENABLED_STATIC" = no])
])
# Set option to enable shared libs.
AC_DEFUN([FUSE_DFS_ENABLE_DEFAULT_SHARED],
[
SHARED=""
STATIC=""
AC_MSG_CHECKING([whether to enable shared mode])
AC_ARG_ENABLE([shared],
[ --disable-shared Set up static mode.],
[
ENABLED_SHARED=$enableval
],
[
ENABLED_SHARED="yes"
]
)
if test "$ENABLED_SHARED" = "yes"
then
LTYPE=".so"
SHARED_CXXFLAGS="-fPIC"
SHARED_CFLAGS="-fPIC"
SHARED_LDFLAGS="-shared -fPIC"
AC_SUBST(SHARED_CXXFLAGS)
AC_SUBST(SHARED_CFLAGS)
AC_SUBST(SHARED_LDFLAGS)
else
LTYPE=".a"
fi
AC_MSG_RESULT($ENABLED_SHARED)
AC_SUBST(LTYPE)
AM_CONDITIONAL([SHARED], [test "$ENABLED_SHARED" = yes])
AM_CONDITIONAL([STATIC], [test "$ENABLED_SHARED" = no])
])
# Generates define flags and conditionals as specified by user.
# This gets enabled *only* if user selects --enable-<FEATURE> option.
AC_DEFUN([FUSE_DFS_ENABLE_FEATURE],
[
ENABLE=""
flag="$1"
value="$3"
AC_MSG_CHECKING([whether to enable $1])
AC_ARG_ENABLE([$2],
[ --enable-$2 Enable $2.],
[
ENABLE=$enableval
],
[
ENABLE="no"
]
)
AM_CONDITIONAL([$1], [test "$ENABLE" = yes])
if test "$ENABLE" = "yes"
then
if test "x${value}" = "x"
then
AC_DEFINE([$1])
else
AC_DEFINE_UNQUOTED([$1], [$value])
fi
fi
AC_MSG_RESULT($ENABLE)
])
# can also use eval $2=$withval;AC_SUBST($2)
AC_DEFUN([FUSE_DFS_WITH_PATH],
[
USRFLAG=""
USRFLAG=$1
AC_MSG_CHECKING([Checking $1 set to])
AC_ARG_WITH([$2],
[ --with-$2=DIR User specified path.],
[
LOC=$withval
eval $USRFLAG=$withval
],
[
LOC=$3
eval $USRFLAG=$3
]
)
AC_SUBST([$1])
AC_MSG_RESULT($LOC)
])
AC_DEFUN([FUSE_DFS_SET_FLAG_VALUE],
[
SETFLAG=""
AC_MSG_CHECKING([Checking $1 set to])
SETFLAG=$1
eval $SETFLAG=\"$2\"
AC_SUBST([$SETFLAG])
AC_MSG_RESULT($2)
])
# NOTES
# if using if else bourne stmt you must have more than a macro in it.
# EX1 is not correct. EX2 is correct
# EX1: if test "$XX" = "yes"; then
# AC_SUBST(xx)
# fi
# EX2: if test "$XX" = "yes"; then
# xx="foo"
# AC_SUBST(xx)
# fi


@@ -1,82 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Autoconf input file
# $Id$
# AC - autoconf
#########################################################################
# Section 1:
# DO NOT TOUCH EXCEPT TO CHANGE Product-Name and Rev# IN AC_INIT
AC_PREREQ(2.52)
AC_INIT([fuse_dfs], [0.1.0])
#AC_CONFIG_AUX_DIR([/usr/share/automake-1.9])
# To install locally
AC_CANONICAL_TARGET()
FUSE_DFS_INITIALIZE([localinstall])
AC_PREFIX_DEFAULT([`pwd`])
#case $target in
#*64*intel)
# OS_ARCH=intel64 ;;
#*64*amd* | *64*unknown*)
# OS_ARCH=amd64 ;;
#$esac
#AC_SUBST(OS_ARCH)
DEFS=""
AC_SUBST([DEFS])
# Need GNU source for multiple hashtables from glibc
AC_GNU_SOURCE
AC_FUNC_GETGROUPS
AC_TYPE_GETGROUPS
AC_PROG_CC
AC_SYS_LARGEFILE
############################################################################
# Section 2:
# User Configurable system defaults. Change With CAUTION!
# User can include custom makefile rules. Uncomment and update only <name> in PRODUCT_MK.
# Include where appropriate in any Makefile.am as @PRODUCT_MK@
# Default path to external components and shared build tools
# To point to other locations set environment variable EXTERNAL_PATH.
# DO NOT change default. Changing default value requires changing bootstrap.sh.
FUSE_DFS_WITH_EXTERNAL_PATH([`pwd`])
# Pre-defined macro to set optimized build mode. Configure with --disable-opt option to turn off optimization. Default CXXFLAGS set to '-Wall -O3'. In debug mode CXXFLAGS is '-Wall -g'
# FUSE_DFS_ENABLE_DEFAULT_DEBUG_BUILD
FUSE_DFS_ENABLE_DEFAULT_OPT_BUILD
# Predefined macro to set static library mode. Configure with --disable-static option to turn off static lib mode.
# FUSE_DFS_ENABLE_DEFAULT_SHARED
FUSE_DFS_ENABLE_DEFAULT_STATIC
AC_CONFIG_FILES(Makefile src/Makefile)
############################################################################
# Section 4:
# DO NOT TOUCH.
AC_SUBST(PRODUCT_MK)
AC_OUTPUT


@@ -1,164 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project</artifactId>
<version>3.0.0-SNAPSHOT</version>
<relativePath>../../../../../hadoop-project</relativePath>
</parent>
<groupId>org.apache.hadoop.contrib</groupId>
<artifactId>hadoop-hdfs-fuse</artifactId>
<version>3.0.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Apache Hadoop HDFS Fuse</name>
<description>Apache Hadoop HDFS Fuse</description>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<!-- workaround for filtered/unfiltered resources in same directory -->
<!-- remove when maven-eclipse-plugin 2.9 is available -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.6</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<threadCount>1</threadCount>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>javadoc</goal>
</goals>
<phase>site</phase>
<configuration>
<linksource>true</linksource>
<quiet>true</quiet>
<verbose>false</verbose>
<source>${maven.compile.source}</source>
<charset>${maven.compile.encoding}</charset>
<groups>
<group>
<title>HttpFs API</title>
<packages>*</packages>
</group>
</groups>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<executions>
<execution>
<configuration>
<dependencyLocationsEnabled>false</dependencyLocationsEnabled>
</configuration>
<goals>
<goal>dependencies</goal>
</goals>
<phase>site</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>fuse</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>prepare-compile-native</id>
<phase>generate-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy toDir="${project.build.directory}/fuse-dfs">
<fileset dir="${basedir}"/>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>compile-fuse</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<ant antfile="${project.build.directory}/fuse-dfs/build.xml"
dir="${project.build.directory}/fuse-dfs">
<target name="compile"/>
</ant>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>


@@ -0,0 +1,73 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Find Linux FUSE
IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
find_package(PkgConfig REQUIRED)
pkg_check_modules(FUSE fuse)
IF(FUSE_FOUND)
FLATTEN_LIST("${FUSE_CFLAGS}" " " FUSE_CFLAGS)
FLATTEN_LIST("${FUSE_LDFLAGS}" " " FUSE_LDFLAGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FUSE_CFLAGS}")
set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} ${FUSE_LDFLAGS}")
MESSAGE(STATUS "Building Linux FUSE client.")
include_directories(${FUSE_INCLUDE_DIRS})
ELSE(FUSE_FOUND)
MESSAGE(STATUS "Failed to find Linux FUSE libraries or include files. Will not build FUSE client.")
ENDIF(FUSE_FOUND)
ELSE (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
MESSAGE(STATUS "Non-Linux system detected. Will not build FUSE client.")
ENDIF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
IF(FUSE_FOUND)
add_executable(fuse_dfs
fuse_dfs.c
fuse_options.c
fuse_connect.c
fuse_impls_access.c
fuse_impls_chmod.c
fuse_impls_chown.c
fuse_impls_create.c
fuse_impls_flush.c
fuse_impls_getattr.c
fuse_impls_mkdir.c
fuse_impls_mknod.c
fuse_impls_open.c
fuse_impls_read.c
fuse_impls_readdir.c
fuse_impls_release.c
fuse_impls_rename.c
fuse_impls_rmdir.c
fuse_impls_statfs.c
fuse_impls_symlink.c
fuse_impls_truncate.c
fuse_impls_unlink.c
fuse_impls_utimens.c
fuse_impls_write.c
fuse_init.c
fuse_stat_struct.c
fuse_trash.c
fuse_users.c
)
target_link_libraries(fuse_dfs
${FUSE_LIBRARIES}
${JAVA_JVM_LIBRARY}
hdfs
m
)
ENDIF(FUSE_FOUND)


@@ -1,22 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
bin_PROGRAMS = fuse_dfs
fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
AM_CFLAGS= -Wall -g
AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/../../src/main/native -I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
AM_LDFLAGS= -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib64 -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib -L$(FUSE_HOME)/lib -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server
fuse_dfs_LDADD=-lfuse -lhdfs -ljvm -lm


@@ -31,13 +31,9 @@
#include <fuse.h>
#include <fuse/fuse_opt.h>
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#ifdef HAVE_SETXATTR
#include <sys/xattr.h>
#endif
#include "config.h"
//
// Check if a path is in the mount option supplied protected paths.


@@ -1,42 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
@PRODUCT_MK@
#AM_CPPFLAGS = -I$(top_srcdir)
ACLOCAL_AMFLAGS = -I m4
lib_LTLIBRARIES = libhdfs.la
libhdfs_la_SOURCES = hdfs.c hdfsJniHelper.c hdfs.h
#check_PROGRAMS = hdfs_test hdfs_read hdfs_write
check_PROGRAMS = hdfs_test hdfs_read hdfs_write
hdfs_test_SOURCES = hdfs_test.c hdfs.h
hdfs_test_LDADD = ${libdir}/libhdfs.la
hdfs_read_SOURCES = hdfs_read.c
hdfs_read_LDADD = ${libdir}/libhdfs.la
hdfs_write_SOURCES = hdfs_write.c
hdfs_write_LDADD = ${libdir}/libhdfs.la
test: hdfs_test hdfs_read hdfs_write
${LIBHDFS_SRC_DIR}/tests/test-libhdfs.sh
# vim: sw=4: ts=4: noet


@@ -1,125 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Autoconf input file
# $Id$
AC_INIT([libhdfs], [0.1.0], omalley@apache.org)
AC_PREFIX_DEFAULT([`pwd`/../install])
AC_CONFIG_AUX_DIR([config])
# Generates Makefile from Makefile.am. Modify when new subdirs are added.
# Change Makefile.am too when adding a subdirectory.
AM_INIT_AUTOMAKE(foreign no-dist)
AC_CONFIG_FILES(Makefile)
LT_INIT
AC_CONFIG_MACRO_DIR([m4])
dnl -------------------------------------------------------------------------
dnl Check current host (forget about cross compilation) and validate it
dnl against the cache (fail if the cache differs)
dnl -------------------------------------------------------------------------
AP_MSG_HEADER([Current host])
AC_CANONICAL_HOST()
AP_CANONICAL_HOST_CHECK()
dnl -------------------------------------------------------------------------
dnl Check C environment
dnl -------------------------------------------------------------------------
AP_MSG_HEADER([C-Language compilation tools])
AC_PROG_CC()
AC_CHECK_TOOL(RANLIB, ranlib, :)
dnl -------------------------------------------------------------------------
dnl Check if this host is supported
dnl -------------------------------------------------------------------------
AP_MSG_HEADER([Host support])
AP_SUPPORTED_HOST()
if test "$supported_os" = "darwin"
then
if test -z "$JAVA_HOME" -a -d /System/Library/Frameworks/JavaVM.framework/Home; then
JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home
fi
_prevdir=`/bin/pwd`
if test -n "$JAVA_HOME" -a -d "$JAVA_HOME/include"; then
cd "$JAVA_HOME/include"
elif test -n "$JAVA_HOME" -a -d "$JAVA_HOME/../Headers"; then
cd "$JAVA_HOME/../Headers"
else
cd /System/Library/Frameworks/JavaVM.framework/Headers
fi
CFLAGS="$CFLAGS -m${JVM_ARCH} -I`/bin/pwd -P`"
cd $_prevdir
unset _prevdir
fi
dnl -------------------------------------------------------------------------
dnl Check JAVA environment
dnl -------------------------------------------------------------------------
AP_MSG_HEADER([Java compilation tools])
AP_JAVA()
AP_SABLEVM()
AP_KAFFE()
AP_PROG_JAVAC()
AP_PROG_JAR()
AP_JVM_LIBDIR()
if test "$supported_os" != "darwin"
then
case $host_cpu in
arm*) ;;
*)
CFLAGS="$CFLAGS -m${JVM_ARCH}"
LDFLAGS="$LDFLAGS -m${JVM_ARCH}"
;;
esac
AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH])
CFLAGS="$CFLAGS -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os"
LDFLAGS="$LDFLAGS -L$LIB_JVM_DIR -ljvm -Wl,-x"
fi
dnl -------------------------------------------------------------------------
dnl Add gcc specific CFLAGS.
dnl -------------------------------------------------------------------------
if test "$GCC" = "yes"
then
CFLAGS="$CFLAGS -Wall -Wstrict-prototypes"
AC_MSG_RESULT([gcc flags added])
fi
dnl -------------------------------------------------------------------------
dnl Set the default link command.
dnl -------------------------------------------------------------------------
if test -z "$LDCMD"
then
LDCMD="$CC"
fi
AC_SUBST(LDCMD)
AC_PROG_CC
AC_PROG_LIBTOOL
AC_TYPE_SIZE_T
AC_CHECK_FUNCS([strdup strerror strtoul])
AC_CHECK_HEADERS([fcntl.h])
AC_C_CONST
AC_C_VOLATILE
#AC_FUNC_MALLOC
AC_HEADER_STDBOOL
AC_SUBST(PRODUCT_MK)
AC_OUTPUT


@@ -1,41 +0,0 @@
dnl
dnl Licensed to the Apache Software Foundation (ASF) under one or more
dnl contributor license agreements. See the NOTICE file distributed with
dnl this work for additional information regarding copyright ownership.
dnl The ASF licenses this file to You under the Apache License, Version 2.0
dnl (the "License"); you may not use this file except in compliance with
dnl the License. You may obtain a copy of the License at
dnl
dnl http://www.apache.org/licenses/LICENSE-2.0
dnl
dnl Unless required by applicable law or agreed to in writing, software
dnl distributed under the License is distributed on an "AS IS" BASIS,
dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
dnl See the License for the specific language governing permissions and
dnl limitations under the License.
dnl
dnl -------------------------------------------------------------------------
dnl Author Pier Fumagalli <mailto:pier.fumagalli@eng.sun.com>
dnl Version $Id$
dnl -------------------------------------------------------------------------
AC_DEFUN([AP_MSG_HEADER],[
printf "*** %s ***\n" "$1" 1>&2
AC_PROVIDE([$0])
])
AC_DEFUN([AP_CANONICAL_HOST_CHECK],[
AC_MSG_CHECKING([cached host system type])
if { test x"${ac_cv_host_system_type+set}" = x"set" &&
test x"$ac_cv_host_system_type" != x"$host" ; }
then
AC_MSG_RESULT([$ac_cv_host_system_type])
AC_MSG_ERROR([remove the \"$cache_file\" file and re-run configure])
else
AC_MSG_RESULT(ok)
ac_cv_host_system_type="$host"
fi
AC_PROVIDE([$0])
])
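
CMake largely sidesteps this class of stale-cache failure because the host system is recorded per build tree in CMakeCache.txt, but an explicit guard is easy to sketch; CACHED_HOST_SYSTEM is an invented variable name.

    # Sketch of an AP_CANONICAL_HOST_CHECK analogue; CACHED_HOST_SYSTEM is illustrative.
    if(DEFINED CACHED_HOST_SYSTEM AND NOT CACHED_HOST_SYSTEM STREQUAL CMAKE_HOST_SYSTEM)
      message(FATAL_ERROR "cache was created for ${CACHED_HOST_SYSTEM}; "
                          "delete CMakeCache.txt and re-run cmake")
    endif()
    set(CACHED_HOST_SYSTEM "${CMAKE_HOST_SYSTEM}" CACHE INTERNAL "host at first configure")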

View File

@ -1,142 +0,0 @@
dnl
dnl Licensed to the Apache Software Foundation (ASF) under one or more
dnl contributor license agreements. See the NOTICE file distributed with
dnl this work for additional information regarding copyright ownership.
dnl The ASF licenses this file to You under the Apache License, Version 2.0
dnl (the "License"); you may not use this file except in compliance with
dnl the License. You may obtain a copy of the License at
dnl
dnl http://www.apache.org/licenses/LICENSE-2.0
dnl
dnl Unless required by applicable law or agreed to in writing, software
dnl distributed under the License is distributed on an "AS IS" BASIS,
dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
dnl See the License for the specific language governing permissions and
dnl limitations under the License.
dnl
dnl -------------------------------------------------------------------------
dnl Author Pier Fumagalli <mailto:pier.fumagalli@eng.sun.com>
dnl Version $Id$
dnl -------------------------------------------------------------------------
AC_DEFUN([AP_PROG_JAVAC_WORKS],[
AC_CACHE_CHECK([whether the Java compiler ($JAVAC) works],ap_cv_prog_javac_works,[
echo "public class Test {}" > Test.java
$JAVAC $JAVACFLAGS Test.java > /dev/null 2>&1
if test $? -eq 0
then
rm -f Test.java Test.class
ap_cv_prog_javac_works=yes
else
rm -f Test.java Test.class
AC_MSG_RESULT(no)
AC_MSG_ERROR([installation or configuration problem: javac cannot compile])
fi
])
])
dnl Check for JAVA compilers.
AC_DEFUN([AP_PROG_JAVAC],[
if test "$SABLEVM" != "NONE"
then
AC_PATH_PROG(JAVACSABLE,javac-sablevm,NONE,$JAVA_HOME/bin)
else
JAVACSABLE="NONE"
fi
if test "$JAVACSABLE" = "NONE"
then
XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
AC_PATH_PROG(JAVAC,javac,NONE,$XPATH)
else
AC_PATH_PROG(JAVAC,javac-sablevm,NONE,$JAVA_HOME/bin)
fi
AC_MSG_RESULT([$JAVAC])
if test "$JAVAC" = "NONE"
then
AC_MSG_ERROR([javac not found])
fi
AP_PROG_JAVAC_WORKS()
AC_PROVIDE([$0])
AC_SUBST(JAVAC)
AC_SUBST(JAVACFLAGS)
])
dnl Check for jar archivers.
AC_DEFUN([AP_PROG_JAR],[
if test "$SABLEVM" != "NONE"
then
AC_PATH_PROG(JARSABLE,jar-sablevm,NONE,$JAVA_HOME/bin)
else
JARSABLE="NONE"
fi
if test "$JARSABLE" = "NONE"
then
XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
AC_PATH_PROG(JAR,jar,NONE,$XPATH)
else
AC_PATH_PROG(JAR,jar-sablevm,NONE,$JAVA_HOME/bin)
fi
if test "$JAR" = "NONE"
then
AC_MSG_ERROR([jar not found])
fi
AC_PROVIDE([$0])
AC_SUBST(JAR)
])
AC_DEFUN([AP_JAVA],[
AC_ARG_WITH(java,[ --with-java=DIR Specify the location of your JDK installation],[
AC_MSG_CHECKING([JAVA_HOME])
if test -d "$withval"
then
JAVA_HOME="$withval"
AC_MSG_RESULT([$JAVA_HOME])
else
AC_MSG_RESULT([failed])
AC_MSG_ERROR([$withval is not a directory])
fi
AC_SUBST(JAVA_HOME)
])
if test x"$JAVA_HOME" = x
then
AC_MSG_ERROR([Java Home not defined. Rerun with --with-java=[...] parameter])
fi
])
dnl check if the JVM in JAVA_HOME is sableVM
dnl $JAVA_HOME/bin/sablevm and /opt/java/lib/sablevm/bin are tested.
AC_DEFUN([AP_SABLEVM],[
if test x"$JAVA_HOME" != x
then
AC_PATH_PROG(SABLEVM,sablevm,NONE,$JAVA_HOME/bin)
if test "$SABLEVM" = "NONE"
then
dnl java may be SableVM.
if $JAVA_HOME/bin/java -version 2> /dev/null | grep SableVM > /dev/null
then
SABLEVM=$JAVA_HOME/bin/java
fi
fi
if test "$SABLEVM" != "NONE"
then
AC_MSG_RESULT([Using sableVM: $SABLEVM])
CFLAGS="$CFLAGS -DHAVE_SABLEVM"
fi
fi
])
dnl check if the JVM in JAVA_HOME is kaffe
dnl $JAVA_HOME/bin/kaffe is tested.
AC_DEFUN([AP_KAFFE],[
if test x"$JAVA_HOME" != x
then
AC_PATH_PROG(KAFFEVM,kaffe,NONE,$JAVA_HOME/bin)
if test "$KAFFEVM" != "NONE"
then
AC_MSG_RESULT([Using kaffe: $KAFFEVM])
CFLAGS="$CFLAGS -DHAVE_KAFFEVM"
LDFLAGS="$LDFLAGS -Wl,-rpath $JAVA_HOME/jre/lib/$HOST_CPU -L $JAVA_HOME/jre/lib/$HOST_CPU -lkaffevm"
fi
fi
])
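
The CMake build leaves Java tool discovery to Maven, so these macros have no direct successor; if one were wanted, find_program against JAVA_HOME covers the common case. A sketch under that assumption, with the SableVM/Kaffe special-casing deliberately dropped:

    # Sketch: locate javac/jar under $JAVA_HOME instead of probing VM-specific wrappers.
    find_program(JAVAC_EXECUTABLE javac PATHS $ENV{JAVA_HOME}/bin NO_DEFAULT_PATH)
    find_program(JAR_EXECUTABLE jar PATHS $ENV{JAVA_HOME}/bin NO_DEFAULT_PATH)
    if(NOT JAVAC_EXECUTABLE OR NOT JAR_EXECUTABLE)
      message(FATAL_ERROR "javac/jar not found under $ENV{JAVA_HOME}/bin")
    endif()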

View File

@ -1,168 +0,0 @@
dnl
dnl Licensed to the Apache Software Foundation (ASF) under one or more
dnl contributor license agreements. See the NOTICE file distributed with
dnl this work for additional information regarding copyright ownership.
dnl The ASF licenses this file to You under the Apache License, Version 2.0
dnl (the "License"); you may not use this file except in compliance with
dnl the License. You may obtain a copy of the License at
dnl
dnl http://www.apache.org/licenses/LICENSE-2.0
dnl
dnl Unless required by applicable law or agreed to in writing, software
dnl distributed under the License is distributed on an "AS IS" BASIS,
dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
dnl See the License for the specific language governing permissions and
dnl limitations under the License.
dnl
dnl -------------------------------------------------------------------------
dnl Author Pier Fumagalli <mailto:pier.fumagalli@eng.sun.com>
dnl Version $Id$
dnl -------------------------------------------------------------------------
AC_DEFUN([AP_SUPPORTED_HOST],[
AC_MSG_CHECKING([C flags dependent on host system type])
case $host_os in
darwin*)
CFLAGS="$CFLAGS -DOS_DARWIN -DDSO_DYLD"
supported_os="darwin"
;;
solaris*)
CFLAGS="$CFLAGS -DOS_SOLARIS -DDSO_DLFCN"
supported_os="solaris"
LIBS="$LIBS -ldl -lthread"
;;
linux*)
CFLAGS="$CFLAGS -DOS_LINUX -DDSO_DLFCN"
supported_os="linux"
LIBS="$LIBS -ldl -lpthread"
;;
cygwin*)
CFLAGS="$CFLAGS -DOS_CYGWIN -DDSO_DLFCN -DNO_SETSID"
supported_os="win32"
;;
sysv)
CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN"
LIBS="$LIBS -ldl"
;;
sysv4)
CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN -Kthread"
LDFLAGS="-Kthread $LDFLAGS"
LIBS="$LIBS -ldl"
;;
freebsd*)
CFLAGS="$CFLAGS -DOS_FREEBSD -DDSO_DLFCN -D_THREAD_SAFE -pthread"
LDFLAGS="-pthread $LDFLAGS"
supported_os="freebsd"
;;
osf5*)
CFLAGS="$CFLAGS -pthread -DOS_TRU64 -DDSO_DLFCN -D_XOPEN_SOURCE_EXTENDED"
LDFLAGS="$LDFLAGS -pthread"
;;
hpux11*)
CFLAGS="$CFLAGS -pthread -DOS_HPUX -DDSO_DLFCN"
LDFLAGS="$LDFLAGS -pthread"
LIBS="$LIBS -lpthread"
;;
*)
AC_MSG_RESULT([failed])
AC_MSG_ERROR([Unsupported operating system "$host_os"]);;
esac
case $host_cpu in
powerpc*)
CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
HOST_CPU=$host_cpu;;
sparc*)
CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
HOST_CPU=$host_cpu;;
i?86)
CFLAGS="$CFLAGS -DCPU=\\\"i386\\\""
HOST_CPU=i386;;
x86_64)
CFLAGS="$CFLAGS -DCPU=\\\"amd64\\\""
HOST_CPU=amd64;;
bs2000)
CFLAGS="$CFLAGS -DCPU=\\\"osd\\\" -DCHARSET_EBCDIC -DOSD_POSIX"
supported_os="osd"
LDFLAGS="-Kno_link_stdlibs -B llm4"
LIBS="$LIBS -lBLSLIB"
LDCMD="/opt/C/bin/cc"
HOST_CPU=osd;;
mips)
CFLAGS="$CFLAGS -DCPU=\\\"mips\\\""
supported_os="mips"
HOST_CPU=mips;;
alpha*)
CFLAGS="$CFLAGS -DCPU=\\\"alpha\\\""
supported_os="alpha"
HOST_CPU=alpha;;
hppa2.0w)
CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0W\\\""
supported_os="hp-ux"
HOST_CPU=PA_RISC2.0W;;
hppa2.0)
CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0\\\""
supported_os="hp-ux"
HOST_CPU=PA_RISC2.0;;
mipsel)
CFLAGS="$CFLAGS -DCPU=\\\"mipsel\\\""
supported_os="mipsel"
HOST_CPU=mipsel;;
ia64)
CFLAGS="$CFLAGS -DCPU=\\\"ia64\\\""
supported_os="ia64"
HOST_CPU=ia64;;
s390)
CFLAGS="$CFLAGS -DCPU=\\\"s390\\\""
supported_os="s390"
HOST_CPU=s390;;
arm*)
CFLAGS="$CFLAGS -DCPU=\\\"arm\\\""
supported_os="arm"
HOST_CPU=arm;;
*)
AC_MSG_RESULT([failed])
AC_MSG_ERROR([Unsupported CPU architecture "$host_cpu"]);;
esac
AC_MSG_RESULT([ok])
AC_SUBST(CFLAGS)
AC_SUBST(LDFLAGS)
])
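
Under CMake the equivalent dispatch shrinks considerably, since CMAKE_SYSTEM_NAME and CMAKE_SYSTEM_PROCESSOR are canonicalized by CMake itself. A hedged sketch of two of the arms above; PLATFORM_LIBS is an invented variable:

    # Sketch only: platform dispatch via CMake's built-in system variables.
    if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
      add_definitions(-DOS_LINUX -DDSO_DLFCN)
      set(PLATFORM_LIBS dl pthread)                 # autoconf: LIBS="-ldl -lpthread"
    elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
      add_definitions(-DOS_DARWIN -DDSO_DYLD)
    endif()
    add_definitions(-DCPU=\"${CMAKE_SYSTEM_PROCESSOR}\")  # mirrors the -DCPU=... above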
AC_DEFUN([AP_JVM_LIBDIR],[
AC_MSG_CHECKING([location of the JVM library])
javabasedir=$JAVA_HOME
case $host_os in
cygwin* | mingw* | pw23* )
lib_jvm_dir=`find $javabasedir -follow \( \
\( -name client -type d -prune \) -o \
\( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
;;
aix*)
lib_jvm_dir=`find $javabasedir \( \
\( -name client -type d -prune \) -o \
\( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
if test -z "$lib_jvm_dir"; then
lib_jvm_dir=`find $javabasedir \( \
\( -name client -type d -prune \) -o \
\( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
fi
;;
*)
lib_jvm_dir=`find $javabasedir -follow \( \
\( -name client -type d -prune \) -o \
\( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
if test -z "$lib_jvm_dir"; then
lib_jvm_dir=`find $javabasedir -follow \( \
\( -name client -type d -prune \) -o \
\( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`
fi
;;
esac
LIB_JVM_DIR=$lib_jvm_dir
AC_MSG_RESULT([$LIB_JVM_DIR])
AC_SUBST(LIB_JVM_DIR)
])
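
CMake ships a FindJNI module that replaces this hand-rolled find(1) walk. A sketch, where the target name hdfs stands in for whichever native target needs libjvm:

    # Sketch: FindJNI sets JNI_INCLUDE_DIRS and JAVA_JVM_LIBRARY for us.
    find_package(JNI REQUIRED)
    include_directories(${JNI_INCLUDE_DIRS})
    target_link_libraries(hdfs ${JAVA_JVM_LIBRARY})   # replaces -L$LIB_JVM_DIR -ljvm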

View File

@ -34,7 +34,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<module>hadoop-hdfs</module>
<module>hadoop-hdfs-httpfs</module>
<module>hadoop-hdfs/src/contrib/bkjournal</module>
<module>hadoop-hdfs/src/contrib/fuse-dfs</module>
</modules>
<build>

View File

@ -47,47 +47,37 @@
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>make-maven-plugin</artifactId>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>compile</id>
<id>make</id>
<phase>compile</phase>
<goals>
<goal>autoreconf</goal>
<goal>configure</goal>
<goal>make-install</goal>
</goals>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native/target"/>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
<arg line="${basedir}/src/ -DHADOOP_CONF_DIR=${container-executor.conf.dir} -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
<env key="CFLAGS" value="${container-executor.additional_cflags}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
</target>
</configuration>
</execution>
<execution>
<id>test</id>
<id>native_tests</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<target>
<exec executable="test-container-executor" dir="${project.build.directory}/native" failonerror="true">
</exec>
</target>
</configuration>
</execution>
</executions>
<configuration>
<!-- autoreconf settings -->
<workDir>${project.build.directory}/native/container-executor</workDir>
<arguments>
<argument>-i</argument>
</arguments>
<!-- configure settings -->
<configureEnvironment>
<property>
<name>CFLAGS</name>
<value>-DHADOOP_CONF_DIR=${container-executor.conf.dir} ${container-executor.additional_cflags}</value>
</property>
</configureEnvironment>
<configureWorkDir>${project.build.directory}/native/container-executor</configureWorkDir>
<prefix>/usr/local</prefix>
<!-- configure & make settings -->
<destDir>${project.build.directory}/native/target</destDir>
</configuration>
</plugin>
</plugins>
</build>
@ -172,14 +162,6 @@
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native"/>
<copy toDir="${project.build.directory}/native">
<fileset dir="${basedir}/src/main/native"/>
</copy>
</target>
</configuration>
</execution>
</executions>
</plugin>

View File

@ -0,0 +1,69 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
set(CMAKE_BUILD_TYPE Release)
if (JVM_ARCH_DATA_MODEL EQUAL 32)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -m32")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -m32")
if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
set(CMAKE_SYSTEM_PROCESSOR "i686")
endif ()
endif (JVM_ARCH_DATA_MODEL EQUAL 32)
function(output_directory TGT DIR)
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
SET_TARGET_PROPERTIES(${TGT} PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
endfunction(output_directory TGT DIR)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2 -D_GNU_SOURCE")
# note: can't enable -D_LARGEFILE: see MAPREDUCE-4258
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT")
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_BINARY_DIR}
main/native/container-executor
main/native/container-executor/impl
)
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
add_library(container
main/native/container-executor/impl/configuration.c
main/native/container-executor/impl/container-executor.c
)
add_executable(container-executor
main/native/container-executor/impl/main.c
)
target_link_libraries(container-executor
container
)
output_directory(container-executor target/usr/local/bin)
add_executable(test-container-executor
main/native/container-executor/test/test-container-executor.c
)
target_link_libraries(test-container-executor
container
)
output_directory(test-container-executor target/usr/local/bin)
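
Note that test-container-executor is built here but not wired into the native build's own test step; the commit drives it from the Maven native_tests execution shown in the pom change above. Registering it with CTest would be the in-CMake alternative; a sketch, not part of the commit, using the CMake 2.6-era add_test signature:

    # Sketch only: CTest registration instead of driving the test from Maven.
    enable_testing()
    add_test(test-container-executor
             ${CMAKE_BINARY_DIR}/target/usr/local/bin/test-container-executor)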

View File

@ -0,0 +1,6 @@
#ifndef CONFIG_H
#define CONFIG_H
#cmakedefine HADOOP_CONF_DIR "@HADOOP_CONF_DIR@"
#endif
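
The #cmakedefine line is instantiated by the CONFIGURE_FILE() call in the CMakeLists.txt above, using the -DHADOOP_CONF_DIR value the pom passes on the cmake command line. The expansion works as follows; the /etc/hadoop value is an example only:

    # Given: cmake src/ -DHADOOP_CONF_DIR=/etc/hadoop
    # configure_file() rewrites the template line to
    #     #define HADOOP_CONF_DIR "/etc/hadoop"
    # and to /* #undef HADOOP_CONF_DIR */ when the variable is unset.
    configure_file(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)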

View File

@ -1,42 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# autom4te configuration for hadoop utils library
#
begin-language: "Autoheader-preselections"
args: --no-cache
end-language: "Autoheader-preselections"
begin-language: "Automake-preselections"
args: --no-cache
end-language: "Automake-preselections"
begin-language: "Autoreconf-preselections"
args: --no-cache
end-language: "Autoreconf-preselections"
begin-language: "Autoconf-without-aclocal-m4"
args: --no-cache
end-language: "Autoconf-without-aclocal-m4"
begin-language: "Autoconf"
args: --no-cache
end-language: "Autoconf"

View File

@ -1,32 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
AM_CFLAGS=-I$(srcdir)/impl -Wall -g -Werror
# Define the programs that need to be built
bin_PROGRAMS = container-executor
check_PROGRAMS = test-container-executor
TESTS = test-container-executor
# Define the sources for the common files
common_SOURCES = impl/configuration.c impl/container-executor.c
# Define the sources for the real executable
container_executor_SOURCES = $(common_SOURCES) impl/main.c
# Define the sources for the test executable
test_container_executor_SOURCES = $(common_SOURCES) test/test-container-executor.c

View File

@ -1,54 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
AC_PREREQ(2.59)
AC_INIT(linux-container-executor, 1.0.0, mapreduce-dev@hadoop.apache.org)
AC_GNU_SOURCE
#AC_SYS_LARGEFILE
AM_INIT_AUTOMAKE([subdir-objects foreign no-dist])
AC_CONFIG_SRCDIR([impl/container-executor.c])
AC_CONFIG_FILES([Makefile])
AC_PREFIX_DEFAULT(`pwd`/../install)
CHECK_INSTALL_CFLAG
HADOOP_UTILS_SETUP
# Checks for programs.
AC_PROG_CC
AM_PROG_CC_C_O
# Checks for libraries.
# Checks for header files.
AC_LANG(C)
AC_CHECK_HEADERS([unistd.h])
# Checks for typedefs, structures, and compiler characteristics.
AC_HEADER_STDBOOL
AC_C_CONST
AC_TYPE_OFF_T
AC_TYPE_SIZE_T
AC_FUNC_STRERROR_R
# Checks for library functions.
AC_CHECK_FUNCS([mkdir uname])
AC_OUTPUT

View File

@ -16,6 +16,7 @@
* limitations under the License.
*/
#include "config.h"
#include "configuration.h"
#include "container-executor.h"
@ -29,8 +30,6 @@
#include <string.h>
#include <sys/stat.h>
#define _STRINGIFY(X) #X
#define STRINGIFY(X) _STRINGIFY(X)
#define CONF_FILENAME "container-executor.cfg"
// When building as part of a Maven build this value gets defined by using
@ -101,7 +100,7 @@ int main(int argc, char **argv) {
char *executable_file = get_executable();
char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) "/" CONF_FILENAME;
char *orig_conf_file = HADOOP_CONF_DIR "/" CONF_FILENAME;
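/* Under autoconf, HADOOP_CONF_DIR arrived on the compile line as a bare
 * token and had to be STRINGIFY'd; the CMake-generated config.h defines it
 * as a quoted string literal, so plain literal concatenation now suffices. */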
char *conf_file = resolve_config_path(orig_conf_file, argv[0]);
char *local_dirs, *log_dirs;