HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1401321 13f79535-47bb-0310-9956-ffa450edef68
parent a92313f7bd
commit 13422461f3
@@ -1094,6 +1094,9 @@ Release 0.23.5 - UNRELEASED
     HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via
     jlowe)
 
+    HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via
+    bobby)
+
 Release 0.23.4 - UNRELEASED
 
   INCOMPATIBLE CHANGES

@@ -67,6 +67,9 @@ macro(set_find_shared_library_version LVERS)
     IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
         # Mac OS uses .dylib
         SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
+    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
+        # FreeBSD has always .so installed.
+        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
     ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
         # Windows doesn't support finding shared libraries by version.
     ELSE()
@@ -95,8 +98,10 @@ GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
 
 INCLUDE(CheckFunctionExists)
 INCLUDE(CheckCSourceCompiles)
+INCLUDE(CheckLibraryExists)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
 
 SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
 set_find_shared_library_version("1")
@@ -159,6 +164,9 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+if (NEED_LINK_DL)
+    set(LIB_DL dl)
+endif (NEED_LINK_DL)
 
 IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
     #
@@ -171,7 +179,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
 ENDIF()
 
 target_link_dual_libraries(hadoop
-    dl
+    ${LIB_DL}
     ${JAVA_JVM_LIBRARY}
 )
 SET(LIBHADOOP_VERSION "1.0.0")

@@ -43,7 +43,8 @@ public class HardLink {
     OS_TYPE_UNIX,
     OS_TYPE_WINXP,
     OS_TYPE_SOLARIS,
-    OS_TYPE_MAC
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD
   }
 
   public static OSType osType;
@@ -63,7 +64,7 @@ public class HardLink {
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC) {
+      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
       } else if (osType == OSType.OS_TYPE_SOLARIS) {
@@ -95,6 +96,9 @@ public class HardLink {
     else if (osName.contains("Mac")) {
       return OSType.OS_TYPE_MAC;
     }
+    else if (osName.contains("FreeBSD")) {
+      return OSType.OS_TYPE_FREEBSD;
+    }
     else {
       return OSType.OS_TYPE_UNIX;
     }

@@ -254,7 +254,11 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
 
   int err = 0;
   if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, flags))) {
+#ifdef __FreeBSD__
+    throw_ioe(env, errno);
+#else
     throw_ioe(env, err);
+#endif
   }
 #endif
 }
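
For context: POSIX specifies that posix_fadvise() reports failure through its return value (an errno-style code) and does not promise to set errno, which is why the generic branch passes err to throw_ioe(); the FreeBSD branch reads errno instead, presumably matching how that platform reported the failure at the time. A minimal sketch of the standard convention (not part of the patch; the fd is a placeholder):

    #include <fcntl.h>
    #include <stdio.h>
    #include <string.h>

    /* Sketch: the return value of posix_fadvise() is itself the error
     * code; errno is not guaranteed to be set on failure. */
    static void advise_willneed(int fd) {
      int err = posix_fadvise(fd, 0, 0, POSIX_FADV_WILLNEED);
      if (err != 0) {
        fprintf(stderr, "posix_fadvise: %s\n", strerror(err));
      }
    }
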
@@ -310,6 +314,22 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
 #endif
 }
 
+#ifdef __FreeBSD__
+static int toFreeBSDFlags(int flags)
+{
+  int rc = flags & 03;
+  if ( flags & 0100 ) rc |= O_CREAT;
+  if ( flags & 0200 ) rc |= O_EXCL;
+  if ( flags & 0400 ) rc |= O_NOCTTY;
+  if ( flags & 01000 ) rc |= O_TRUNC;
+  if ( flags & 02000 ) rc |= O_APPEND;
+  if ( flags & 04000 ) rc |= O_NONBLOCK;
+  if ( flags &010000 ) rc |= O_SYNC;
+  if ( flags &020000 ) rc |= O_ASYNC;
+  return rc;
+}
+#endif
+
 /*
  * public static native FileDescriptor open(String path, int flags, int mode);
  */
@@ -318,6 +338,9 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_open(
   JNIEnv *env, jclass clazz, jstring j_path,
   jint flags, jint mode)
 {
+#ifdef __FreeBSD__
+  flags = toFreeBSDFlags(flags);
+#endif
   jobject ret = NULL;
 
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
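
The octal literals in toFreeBSDFlags() correspond to the classic Linux values of O_CREAT, O_EXCL, and so on, which the flag constants handed down from the Java side are assumed to encode; FreeBSD's fcntl.h assigns different numbers, so the flags are remapped before calling open(). A small self-check sketch, not part of the patch, assuming it is pasted into the same translation unit on FreeBSD (toFreeBSDFlags is static):

    #include <assert.h>
    #include <fcntl.h>

    /* Sketch: verify the remapping lands on this platform's native flag
     * values.  The access mode (low two bits) passes through unchanged. */
    static void check_flag_translation(void) {
      assert(toFreeBSDFlags(0) == O_RDONLY);
      assert(toFreeBSDFlags(02 | 0100 | 01000) == (O_RDWR | O_CREAT | O_TRUNC));
      assert(toFreeBSDFlags(01 | 02000) == (O_WRONLY | O_APPEND));
    }
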
@@ -399,7 +422,7 @@ err:
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
 ssize_t get_pw_buflen() {
-  size_t ret = 0;
+  long ret = 0;
 #ifdef _SC_GETPW_R_SIZE_MAX
   ret = sysconf(_SC_GETPW_R_SIZE_MAX);
 #endif

@@ -46,6 +46,7 @@ JNIEXPORT jobjectArray JNICALL
 Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNetgroupJNI
 (JNIEnv *env, jobject jobj, jstring jgroup) {
   UserList *userListHead = NULL;
+  UserList *current = NULL;
   int userListSize = 0;
 
   // pointers to free at the end
@@ -72,8 +73,10 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
   // was successful or not (as long as it was called we need to call
   // endnetgrent)
   setnetgrentCalledFlag = 1;
+#ifndef __FreeBSD__
   if(setnetgrent(cgroup) == 1) {
-    UserList *current = NULL;
+#endif
+    current = NULL;
     // three pointers are for host, user, domain, we only care
     // about user now
     char *p[3];
@@ -87,7 +90,9 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
         userListSize++;
       }
     }
+#ifndef __FreeBSD__
   }
+#endif
 
   //--------------------------------------------------
   // build return data (java array)
@@ -101,7 +106,7 @@ Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNet
     goto END;
   }
 
-  UserList * current = NULL;
+  current = NULL;
 
   // note that the loop iterates over list but also over array (i)
   int i = 0;

@@ -78,7 +78,7 @@ int getGroupIDList(const char *user, int *ngroups, gid_t **groups) {
  */
 int getGroupDetails(gid_t group, char **grpBuf) {
   struct group * grp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }
@@ -123,7 +123,7 @@ int getGroupDetails(gid_t group, char **grpBuf) {
  */
 int getPW(const char *user, char **pwbuf) {
   struct passwd *pwbufp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }

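The size_t to long changes here, and in get_pw_buflen() above, exist because sysconf() returns a long and may return -1 when the limit is indeterminate; stored in an unsigned size_t, that -1 becomes a huge value and the "< 1024" fallback never triggers. A minimal sketch of the intended pattern, not part of the patch:

    #include <unistd.h>

    /* Sketch: keep sysconf()'s result signed so the -1 "indeterminate"
     * case falls through to a sane default buffer size. */
    static long pw_buflen_or_default(void) {
      long len = -1;
    #ifdef _SC_GETPW_R_SIZE_MAX
      len = sysconf(_SC_GETPW_R_SIZE_MAX);
    #endif
      return (len > 0) ? len : 1024;
    }
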
@@ -32,7 +32,9 @@
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
+#ifndef __FreeBSD__
 #define USE_PIPELINED
+#endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
@@ -260,7 +262,7 @@ static uint32_t crc32_zlib_sb8(
 // Begin code for SSE4.2 specific hardware support of CRC32C
 ///////////////////////////////////////////////////////////////////////////
 
-#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__)
+#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__) && !defined(__FreeBSD__)
 # define SSE42_FEATURE_BIT (1 << 20)
 # define CPUID_FEATURES 1
 /**

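The block guarded here probes the CPU for SSE4.2 (CPUID leaf 1, ECX bit 20, per the SSE42_FEATURE_BIT and CPUID_FEATURES definitions) before dispatching to the hardware crc32 instruction; the patch simply compiles that section out on FreeBSD, leaving the table-driven CRC in use. For context, a standalone sketch of the same feature probe, not part of the patch, using GCC's <cpuid.h> helper instead of the inline assembly in bulk_crc32.c:

    #include <cpuid.h>
    #include <stdio.h>

    /* Sketch: CPUID leaf 1 reports SSE4.2 support in ECX bit 20; the
     * hardware CRC32C fast path is only safe when that bit is set. */
    int main(void) {
      unsigned int eax, ebx, ecx, edx;
      if (__get_cpuid(1, &eax, &ebx, &ecx, &edx) && (ecx & (1u << 20))) {
        puts("SSE4.2 present: hardware CRC32C path usable");
      } else {
        puts("no SSE4.2: fall back to the table-driven CRC");
      }
      return 0;
    }
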
@@ -364,8 +364,12 @@ public class TestHardLink {
     callCount = createHardLinkMult(src, fileNames, tgt_mult, maxLength);
     //check the request was completed in exactly two "chunks"
     assertEquals(2, callCount);
+    String[] tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparsion
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
     //and check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    assertArrayEquals(fileNames, tgt_multNames);
 
     //Test the case where maxlength is too small even for one filename.
     //It should go ahead and try the single files.
@@ -382,8 +386,12 @@ public class TestHardLink {
         maxLength);
     //should go ahead with each of the three single file names
     assertEquals(3, callCount);
-    //check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparsion
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
+    //and check the results were as expected in the dir tree
+    assertArrayEquals(fileNames, tgt_multNames);
   }
 
   /*

@@ -224,7 +224,10 @@ public class TestNativeIO {
       // we should just skip the unit test on machines where we don't
       // have fadvise support
       assumeTrue(false);
-    } finally {
+    } catch (NativeIOException nioe) {
+      // ignore this error as FreeBSD returns EBADF even if length is zero
+    }
+    finally {
       fis.close();
     }
 