From a85371a85f963c7a6175efb2b38a8d7702fc646e Mon Sep 17 00:00:00 2001
From: Robert Joseph Evans
Date: Tue, 23 Oct 2012 15:37:38 +0000
Subject: [PATCH] svn merge -c 1401321 FIXES: HADOOP-8811. Compile hadoop
 native library in FreeBSD (Radim Kolar via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1401323 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 +++
 .../hadoop-common/src/CMakeLists.txt          | 10 +++++++-
 .../java/org/apache/hadoop/fs/HardLink.java   |  8 ++++--
 .../org/apache/hadoop/io/nativeio/NativeIO.c  | 25 ++++++++++++++++++-
 .../JniBasedUnixGroupsNetgroupMapping.c       |  9 +++++--
 .../src/org/apache/hadoop/security/getGroup.c |  4 +--
 .../src/org/apache/hadoop/util/bulk_crc32.c   |  4 ++-
 .../org/apache/hadoop/fs/TestHardLink.java    | 14 ++++++++---
 .../hadoop/io/nativeio/TestNativeIO.java      |  5 +++-
 9 files changed, 69 insertions(+), 13 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index a1f73175423..c5e0c6892e0 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -824,6 +824,9 @@ Release 0.23.5 - UNRELEASED
     HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via
     jlowe)
 
+    HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via
+    bobby)
+
 Release 0.23.4 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
index 68c63abae3b..c7f05e5c3bc 100644
--- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt
+++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -67,6 +67,9 @@ macro(set_find_shared_library_version LVERS)
     IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
         # Mac OS uses .dylib
         SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
+    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
+        # FreeBSD always installs plain .so files.
+        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
     ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
         # Windows doesn't support finding shared libraries by version.
     ELSE()
@@ -95,8 +98,10 @@ GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
 
 INCLUDE(CheckFunctionExists)
 INCLUDE(CheckCSourceCompiles)
+INCLUDE(CheckLibraryExists)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
 
 SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
 set_find_shared_library_version("1")
@@ -159,6 +164,9 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+if (NEED_LINK_DL)
+    set(LIB_DL dl)
+endif (NEED_LINK_DL)
 
 IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
     #
@@ -171,7 +179,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
 ENDIF()
 
 target_link_dual_libraries(hadoop
-    dl
+    ${LIB_DL}
     ${JAVA_JVM_LIBRARY}
 )
 SET(LIBHADOOP_VERSION "1.0.0")
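The CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL) probe above is the heart of the CMake change: glibc ships dlopen(3) in a separate libdl, while FreeBSD's libc provides it directly, so an unconditional -ldl on the link line breaks the FreeBSD build. Below is a minimal standalone sketch of what that probe effectively tests; "libz.so" is an arbitrary stand-in for any shared object and is not part of the patch:

    /* probe.c: can dlopen() be resolved, and does it need an extra library? */
    #include <dlfcn.h>
    #include <stdio.h>

    int main(void) {
      void *handle = dlopen("libz.so", RTLD_LAZY);
      printf("dlopen %s\n", handle ? "resolved" : "failed");
      if (handle) dlclose(handle);
      return 0;
    }

    /* Linux:   cc probe.c -ldl   (dlopen lives in libdl)
     * FreeBSD: cc probe.c        (dlopen lives in libc; there is no libdl) */

With the probe result cached in NEED_LINK_DL, the target_link_dual_libraries hunk swaps the hard-coded dl for ${LIB_DL}, which expands to nothing on FreeBSD.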
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
index eba1e0c6c8b..2ea115bbaa7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
@@ -43,7 +43,8 @@ public enum OSType {
     OS_TYPE_UNIX,
     OS_TYPE_WINXP,
     OS_TYPE_SOLARIS,
-    OS_TYPE_MAC
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD
   }
 
   public static OSType osType;
@@ -63,7 +64,7 @@ public enum OSType {
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC) {
+      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
       } else if (osType == OSType.OS_TYPE_SOLARIS) {
@@ -95,6 +96,9 @@ else if (osName.contains("SunOS")
     else if (osName.contains("Mac")) {
       return OSType.OS_TYPE_MAC;
     }
+    else if (osName.contains("FreeBSD")) {
+      return OSType.OS_TYPE_FREEBSD;
+    }
     else {
       return OSType.OS_TYPE_UNIX;
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
index 139ddafecaa..4a91d0af954 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
@@ -254,7 +254,11 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
 
   int err = 0;
   if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, flags))) {
+#ifdef __FreeBSD__
+    throw_ioe(env, errno);
+#else
     throw_ioe(env, err);
+#endif
   }
 #endif
 }
@@ -310,6 +314,22 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
 #endif
 }
 
+#ifdef __FreeBSD__
+static int toFreeBSDFlags(int flags)
+{
+  int rc = flags & 03;
+  if ( flags &  0100 ) rc |= O_CREAT;
+  if ( flags &  0200 ) rc |= O_EXCL;
+  if ( flags &  0400 ) rc |= O_NOCTTY;
+  if ( flags & 01000 ) rc |= O_TRUNC;
+  if ( flags & 02000 ) rc |= O_APPEND;
+  if ( flags & 04000 ) rc |= O_NONBLOCK;
+  if ( flags &010000 ) rc |= O_SYNC;
+  if ( flags &020000 ) rc |= O_ASYNC;
+  return rc;
+}
+#endif
+
 /*
  * public static native FileDescriptor open(String path, int flags, int mode);
  */
@@ -318,6 +338,9 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_open(
   JNIEnv *env, jclass clazz, jstring j_path,
   jint flags, jint mode)
 {
+#ifdef __FreeBSD__
+  flags = toFreeBSDFlags(flags);
+#endif
   jobject ret = NULL;
 
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
@@ -399,7 +422,7 @@ err:
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
 ssize_t get_pw_buflen() {
-  size_t ret = 0;
+  long ret = 0;
 #ifdef _SC_GETPW_R_SIZE_MAX
   ret = sysconf(_SC_GETPW_R_SIZE_MAX);
 #endif
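The toFreeBSDFlags() helper added above exists because NativeIO passes the open(2) flag bits from Java to the native side as plain integers, and those Java-side constants encode Linux's numeric values (O_CREAT == 0100, O_EXCL == 0200, and so on). FreeBSD assigns different bit positions to the same names, so the bits must be remapped before they reach open(). A standalone sketch, not part of the patch, that makes the divergence visible on whatever platform compiles it:

    /* Print this platform's actual O_* encodings. On Linux the output matches
     * the octal constants tested in toFreeBSDFlags() (0100, 0200, 02000, ...);
     * on FreeBSD the same names expand to different bits, e.g. O_CREAT is
     * 0x0200 there. */
    #include <fcntl.h>
    #include <stdio.h>

    int main(void) {
      printf("O_CREAT=%#o O_EXCL=%#o O_APPEND=%#o O_NONBLOCK=%#o\n",
             O_CREAT, O_EXCL, O_APPEND, O_NONBLOCK);
      return 0;
    }

The low two bits (flags & 03) pass through untranslated because the access modes O_RDONLY, O_WRONLY and O_RDWR occupy the same positions on both systems.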
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
index 39458f36177..1177d728221 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
@@ -46,6 +46,7 @@ JNIEXPORT jobjectArray JNICALL
 Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNetgroupJNI
 (JNIEnv *env, jobject jobj, jstring jgroup) {
   UserList *userListHead = NULL;
+  UserList *current = NULL;
   int userListSize = 0;
 
   // pointers to free at the end
@@ -72,8 +73,10 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
   // was successful or not (as long as it was called we need to call
   // endnetgrent)
   setnetgrentCalledFlag = 1;
+#ifndef __FreeBSD__
   if(setnetgrent(cgroup) == 1) {
-    UserList *current = NULL;
+#endif
+    current = NULL;
     // three pointers are for host, user, domain, we only care
     // about user now
     char *p[3];
@@ -87,7 +90,9 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
         userListSize++;
       }
     }
+#ifndef __FreeBSD__
   }
+#endif
 
   //--------------------------------------------------
   // build return data (java array)
@@ -101,7 +106,7 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
     goto END;
   }
 
-  UserList * current = NULL;
+  current = NULL;
   // note that the loop iterates over list but also over array (i)
   int i = 0;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
index 2b558c54fe9..f19ec79e16b 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
@@ -78,7 +78,7 @@ int getGroupIDList(const char *user, int *ngroups, gid_t **groups) {
  */
 int getGroupDetails(gid_t group, char **grpBuf) {
   struct group * grp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }
@@ -123,7 +123,7 @@ int getGroupDetails(gid_t group, char **grpBuf) {
  */
 int getPW(const char *user, char **pwbuf) {
   struct passwd *pwbufp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }
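The size_t-to-long changes in getGroup.c (and the matching one in get_pw_buflen() earlier) fix a sign bug rather than a style nit: sysconf(3) returns a long and reports an indeterminate limit as -1, which FreeBSD commonly does for _SC_GETPW_R_SIZE_MAX and _SC_GETGR_R_SIZE_MAX. Stored in an unsigned size_t, that -1 wraps to SIZE_MAX, the "< 1024" floor check can never fire, and the subsequent buffer allocation is doomed. A short standalone sketch of the corrected idiom:

    /* A -1 "no fixed limit" result from sysconf() must stay visible as -1. */
    #include <stdio.h>
    #include <unistd.h>

    int main(void) {
      long len = sysconf(_SC_GETPW_R_SIZE_MAX);
      /* catches both a small limit and the -1 sentinel; with an unsigned
       * size_t the sentinel would wrap to SIZE_MAX and slip past the check */
      if (len < 1024) {
        len = 1024;
      }
      printf("getpwuid_r buffer: %ld bytes\n", len);
      return 0;
    }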
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
index 7009bf1f5cc..74f79dd35dd 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
@@ -32,7 +32,9 @@
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
+#ifndef __FreeBSD__
 #define USE_PIPELINED
+#endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
@@ -260,7 +262,7 @@ static uint32_t crc32_zlib_sb8(
 // Begin code for SSE4.2 specific hardware support of CRC32C
 ///////////////////////////////////////////////////////////////////////////
 
-#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__)
+#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__) && !defined(__FreeBSD__)
 # define SSE42_FEATURE_BIT (1 << 20)
 # define CPUID_FEATURES 1
 /**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
index ff1d099438b..3b769472466 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
@@ -364,8 +364,12 @@ public void testCreateHardLinkMultOversizeAndEmpty() throws IOException {
     callCount = createHardLinkMult(src, fileNames, tgt_mult, maxLength);
     //check the request was completed in exactly two "chunks"
     assertEquals(2, callCount);
+    String[] tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
     //and check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    assertArrayEquals(fileNames, tgt_multNames);
 
     //Test the case where maxlength is too small even for one filename.
     //It should go ahead and try the single files.
@@ -382,8 +386,12 @@
         maxLength);
     //should go ahead with each of the three single file names
     assertEquals(3, callCount);
-    //check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
+    //and check the results were as expected in the dir tree
+    assertArrayEquals(fileNames, tgt_multNames);
   }
 
   /*
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index acd728b0ecb..78003249d02 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -224,7 +224,10 @@ public void testPosixFadvise() throws Exception {
       // we should just skip the unit test on machines where we don't
      // have fadvise support
       assumeTrue(false);
-    } finally {
+    } catch (NativeIOException nioe) {
+      // ignore this error as FreeBSD returns EBADF even if length is zero
+    }
+    finally {
       fis.close();
     }
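The posix_fadvise() change in NativeIO.c and the extra catch block in TestNativeIO.java above are two halves of the same portability quirk. POSIX specifies that posix_fadvise() returns the error number directly and leaves errno untouched, which is what the common branch's throw_ioe(env, err) assumes; the FreeBSD implementation this patch targets instead reported failures the classic way, -1 with errno set, and could yield EBADF in cases where Linux succeeds, hence the tolerated NativeIOException in the test. A small standalone sketch, assuming only standard headers, of a caller that copes with both conventions:

    #define _XOPEN_SOURCE 600
    #include <errno.h>
    #include <fcntl.h>
    #include <stdio.h>

    int main(void) {
      errno = 0;
      /* deliberately bad file descriptor so the call fails everywhere */
      int err = posix_fadvise(-1, 0, 0, POSIX_FADV_SEQUENTIAL);
      if (err > 0) {
        printf("POSIX convention: error %d returned\n", err);
      } else if (err == -1 && errno != 0) {
        printf("errno convention: errno %d set\n", errno);
      }
      return 0;
    }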