HDFS-14818. Check native pmdk lib by 'hadoop checknative' command. Contributed by Feilong He.
This commit is contained in: parent a94aa1ff46, commit 659c88801d
@@ -170,7 +170,7 @@ if(REQUIRE_PMDK)
     set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})

     if(PMDK_LIBRARY)
-        GET_FILENAME_COMPONENT(HADOOP_PMDK_LIBRARY ${PMDK_LIBRARY} NAME)
+        GET_FILENAME_COMPONENT(HADOOP_PMDK_LIBRARY ${PMDK_LIBRARY} REALPATH)
         set(PMDK_SOURCE_FILES ${SRC}/io/nativeio/pmdk_load.c)
     else(PMDK_LIBRARY)
         MESSAGE(FATAL_ERROR "The required PMDK library is NOT found. PMDK_LIBRARY=${PMDK_LIBRARY}")
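CMake's NAME component keeps only the file name of PMDK_LIBRARY, while REALPATH resolves it (following symlinks) to an absolute path. With this change the HADOOP_PMDK_LIBRARY value compiled into libhadoop is the actual location of the PMDK shared object, which is what the reporting code added below ultimately hands back to `hadoop checknative`.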
@@ -120,16 +120,19 @@ public class NativeIO {
       public String getMessage() {
         String msg;
         switch (stateCode) {
+        // -1 represents UNSUPPORTED.
         case -1:
-          msg = "The native code is built without PMDK support.";
+          msg = "The native code was built without PMDK support.";
           break;
+        // 1 represents PMDK_LIB_NOT_FOUND.
         case 1:
-          msg = "The native code is built with PMDK support, but PMDK libs " +
-              "are NOT found in execution environment or failed to be loaded.";
+          msg = "The native code was built with PMDK support, but PMDK libs " +
+              "were NOT found in execution environment or failed to be loaded.";
           break;
+        // 0 represents SUPPORTED.
         case 0:
-          msg = "The native code is built with PMDK support, and PMDK libs " +
-              "are loaded successfully.";
+          msg = "The native code was built with PMDK support, and PMDK libs " +
+              "were loaded successfully.";
           break;
         default:
           msg = "The state code: " + stateCode + " is unrecognized!";
@@ -140,7 +143,7 @@ public class NativeIO {

     // Denotes the state of supporting PMDK. The value is set by JNI.
     private static SupportState pmdkSupportState =
-        SupportState.PMDK_LIB_NOT_FOUND;
+        SupportState.UNSUPPORTED;

     private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class);
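For orientation, the state codes referenced by the comments above map onto the SupportState constants as follows. This is only a sketch inferred from the hunks in this commit, not a verbatim copy of the enum in NativeIO.java:

// Sketch only: names and integer codes are taken from the hunks above;
// getMessage() is the switch shown in the first NativeIO.java hunk.
public enum SupportState {
  UNSUPPORTED(-1),        // native code built without PMDK support
  SUPPORTED(0),           // built with PMDK support and libs loaded
  PMDK_LIB_NOT_FOUND(1);  // built with PMDK support, libs missing at runtime

  private final int stateCode;

  SupportState(int stateCode) {
    this.stateCode = stateCode;
  }

  public int getStateCode() {
    return stateCode;
  }
}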
@@ -177,6 +180,14 @@ public class NativeIO {
         LOG.error("The state code: " + stateCode + " is unrecognized!");
     }

+    public static String getPmdkSupportStateMessage() {
+      if (getPmdkLibPath() != null) {
+        return pmdkSupportState.getMessage() +
+            " The pmdk lib path: " + getPmdkLibPath();
+      }
+      return pmdkSupportState.getMessage();
+    }
+
     public static boolean isPmdkAvailable() {
       LOG.info(pmdkSupportState.getMessage());
       return pmdkSupportState == SupportState.SUPPORTED;
@@ -242,8 +253,13 @@ public class NativeIO {
           NativeIO.POSIX.pmemSync(region.getAddress(), region.getLength());
         }
       }
+
+      public static String getPmdkLibPath() {
+        return POSIX.getPmdkLibPath();
+      }
     }

+    private static native String getPmdkLibPath();
     private static native boolean isPmemCheck(long address, long length);
     private static native PmemMappedRegion pmemCreateMapFile(String path,
         long length);
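The new accessors give callers a single place to ask about PMDK support. A minimal, illustrative caller (not part of this patch; only the NativeIO.POSIX method names come from it) could look like this:

import org.apache.hadoop.io.nativeio.NativeIO;

public class PmdkProbe {
  public static void main(String[] args) {
    // True only when libhadoop was built with PMDK and libpmem loaded at runtime.
    boolean available = NativeIO.POSIX.isPmdkAvailable();
    // Human-readable state; the resolved lib path is appended when it is known.
    System.out.println(NativeIO.POSIX.getPmdkSupportStateMessage());
    if (available) {
      // Real path of the PMDK shared object baked in via HADOOP_PMDK_LIBRARY.
      System.out.println("PMDK library: " + NativeIO.POSIX.Pmem.getPmdkLibPath());
    }
  }
}

NativeLibraryChecker below wires the same calls into the `hadoop checknative` report.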
@@ -28,6 +28,7 @@ import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.nativeio.NativeIO;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -69,6 +70,7 @@ public class NativeLibraryChecker {
     boolean snappyLoaded = false;
     boolean isalLoaded = false;
     boolean zStdLoaded = false;
+    boolean pmdkLoaded = false;
     // lz4 is linked within libhadoop
     boolean lz4Loaded = nativeHadoopLoaded;
     boolean bzip2Loaded = Bzip2Factory.isNativeBzip2Loaded(conf);
@@ -80,6 +82,7 @@ public class NativeLibraryChecker {
     String zlibLibraryName = "";
     String snappyLibraryName = "";
     String isalDetail = "";
+    String pmdkDetail = "";
     String zstdLibraryName = "";
     String lz4LibraryName = "";
     String bzip2LibraryName = "";
@@ -110,6 +113,12 @@ public class NativeLibraryChecker {
         isalLoaded = true;
       }

+      pmdkDetail = NativeIO.POSIX.getPmdkSupportStateMessage();
+      pmdkLoaded = NativeIO.POSIX.isPmdkAvailable();
+      if (pmdkLoaded) {
+        pmdkDetail = NativeIO.POSIX.Pmem.getPmdkLibPath();
+      }
+
       openSslDetail = OpensslCipher.getLoadingFailureReason();
       if (openSslDetail != null) {
         openSslLoaded = false;
@@ -148,6 +157,7 @@ public class NativeLibraryChecker {
       System.out.printf("bzip2: %b %s%n", bzip2Loaded, bzip2LibraryName);
       System.out.printf("openssl: %b %s%n", openSslLoaded, openSslDetail);
       System.out.printf("ISA-L: %b %s%n", isalLoaded, isalDetail);
+      System.out.printf("PMDK: %b %s%n", pmdkLoaded, pmdkDetail);

       if (Shell.WINDOWS) {
         System.out.printf("winutils: %b %s%n", winutilsExists, winutilsPath);
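With the check wired in, `hadoop checknative` prints one extra row of the form `PMDK: <loaded> <detail>`. When PMDK is available, `<detail>` is the resolved library path (for example, something like `/usr/lib64/libpmem.so.1.0.0`; the exact path depends on the host). Otherwise it carries the message from getPmdkSupportStateMessage(), e.g. "The native code was built without PMDK support." for a build made without PMDK.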
@@ -292,10 +292,13 @@ static int loadPmdkLib(JNIEnv *env) {
   if (mid == 0) {
     return 0;
   }

   if (strlen(errMsg) > 0) {
+    // Set PMDK support state to 1 which represents PMDK_LIB_NOT_FOUND.
+    (*env)->CallStaticVoidMethod(env, clazz, mid, 1);
     return 0;
   }
+  // Set PMDK support state to 0 which represents SUPPORTED.
   (*env)->CallStaticVoidMethod(env, clazz, mid, 0);
   return 1;
 }
@@ -1620,7 +1623,7 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_pm
   char msg[1000];
   succeed = pmdkLoader->pmem_msync(address, length);
   // succeed = -1 failure
-  if (succeed = -1) {
+  if (succeed == -1) {
     snprintf(msg, sizeof(msg), "Failed to msync region. address: %x, length: %x, error msg: %s", address, length, pmem_errormsg());
     THROW(env, "java/io/IOException", msg);
     return;
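The one-character change above is a real fix: in C, `succeed = -1` is an assignment whose value (-1) is always truthy, so the old test entered the error branch and threw an IOException regardless of what pmem_msync() returned; `succeed == -1` restores the intended failure check.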
@@ -1631,6 +1634,15 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_pm
 #endif
 }

+JNIEXPORT jstring JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getPmdkLibPath
+  (JNIEnv * env, jclass thisClass) {
+  jstring libpath = NULL;
+
+#ifdef HADOOP_PMDK_LIBRARY
+  libpath = (*env)->NewStringUTF(env, HADOOP_PMDK_LIBRARY);
+#endif
+  return libpath;
+}

 #ifdef __cplusplus
 }
@@ -59,11 +59,11 @@ static const char* load_functions() {
 void load_pmdk_lib(char* err, size_t err_len) {
   const char* errMsg;
   const char* library = NULL;
-  #ifdef UNIX
+#ifdef UNIX
   Dl_info dl_info;
-  #else
+#else
   LPTSTR filename = NULL;
-  #endif
+#endif

   err[0] = '\0';
@@ -88,15 +88,15 @@ void load_pmdk_lib(char* err, size_t err_len) {
     snprintf(err, err_len, "Loading functions from PMDK failed: %s", errMsg);
   }

-  #ifdef UNIX
-  if(dladdr(pmdkLoader->pmem_map_file, &dl_info)) {
+#ifdef UNIX
+  if (dladdr(pmdkLoader->pmem_map_file, &dl_info)) {
     library = dl_info.dli_fname;
   }
-  #else
+#else
   if (GetModuleFileName(pmdkLoader->libec, filename, 256) > 0) {
     library = filename;
   }
-  #endif
+#endif

   if (library == NULL) {
     library = HADOOP_PMDK_LIBRARY;
@@ -79,11 +79,6 @@ void *myDlsym(void *handle, const char *symbol) {

 #endif

-/**
- * Return 0 if not support, 1 otherwise.
- */
-int build_support_pmdk();
-
 /**
  * Initialize and load PMDK library, returning error message if any.
  *
@@ -267,8 +267,8 @@ public class FsDatasetCache {
     Value prevValue = mappableBlockMap.get(key);
     boolean deferred = false;

-    if (!dataset.datanode.getShortCircuitRegistry().
-        processBlockMunlockRequest(key)) {
+    if (cacheLoader.isTransientCache() && !dataset.datanode.
+        getShortCircuitRegistry().processBlockMunlockRequest(key)) {
       deferred = true;
     }
     if (prevValue == null) {
@@ -438,7 +438,11 @@ public class FsDatasetCache {
       }
       LOG.debug("Successfully cached {}. We are now caching {} bytes in"
           + " total.", key, newUsedBytes);
-      dataset.datanode.getShortCircuitRegistry().processBlockMlockEvent(key);
+      // Only applicable to DRAM cache.
+      if (cacheLoader.isTransientCache()) {
+        dataset.datanode.
+            getShortCircuitRegistry().processBlockMlockEvent(key);
+      }
       numBlocksCached.addAndGet(1);
       dataset.datanode.getMetrics().incrBlocksCached(1);
       success = true;
@@ -476,6 +480,11 @@ public class FsDatasetCache {
     }

     private boolean shouldDefer() {
+      // Currently, defer condition is just checked for DRAM cache case.
+      if (!cacheLoader.isTransientCache()) {
+        return false;
+      }
+
       /* If revocationTimeMs == 0, this is an immediate uncache request.
        * No clients were anchored at the time we made the request. */
       if (revocationTimeMs == 0) {