HADOOP-6978. Adds support for NativeIO using JNI. Contributed by Todd Lipcon, Devaraj Das & Owen O'Malley.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1040883 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 2010-12-01 08:03:58 +00:00
parent ebb236ef9c
commit dbd07f9e8c
20 changed files with 3113 additions and 462 deletions

CHANGES.txt

@@ -75,6 +75,9 @@ Release 0.22.0 - Unreleased
     HADOOP-7013. Add boolean field isCorrupt to BlockLocation.
     (Patrick Kling via hairong)
+    HADOOP-6978. Adds support for NativeIO using JNI.
+    (Todd Lipcon, Devaraj Das & Owen O'Malley via ddas)
   IMPROVEMENTS
     HADOOP-6644. util.Shell getGROUPS_FOR_USER_COMMAND method name

build.xml

@@ -366,6 +366,7 @@
     <mkdir dir="${build.native}/lib"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+    <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
     <javah
@@ -386,6 +387,14 @@
       >
       <class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
     </javah>
+    <javah
+      classpath="${build.classes}"
+      destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
+      force="yes"
+      verbose="yes"
+      >
+      <class name="org.apache.hadoop.io.nativeio.NativeIO" />
+    </javah>
     <exec dir="${build.native}" executable="sh" failonerror="true">
       <env key="OS_NAME" value="${os.name}"/>

src/java/org/apache/hadoop/io/SecureIOUtils.java

@@ -0,0 +1,208 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.nativeio.Errno;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.io.nativeio.NativeIOException;
import org.apache.hadoop.io.nativeio.NativeIO.Stat;
import org.apache.hadoop.security.UserGroupInformation;
/**
* This class provides secure APIs for opening and creating files on the local
* disk. The main issue this class tries to handle is that of symlink traversal.
* <br/>
* An example of such an attack is:
* <ol>
* <li> A malicious user removes his task's syslog file and replaces it with a
* symlink to the jobToken file of a target user.</li>
* <li> Malicious user tries to open the syslog file via the servlet on the
* tasktracker.</li>
* <li> The tasktracker is unaware of the symlink, and simply streams the contents
* of the jobToken file. The malicious user can now access potentially sensitive
* map outputs, etc. of the target user's job.</li>
* </ol>
* A similar attack is possible involving task log truncation, but there the
* vulnerability is an insecure write to the file rather than a read.
* <br/>
*/
public class SecureIOUtils {
/**
* Ensure that we are set up to run with the appropriate native support code.
* If security is disabled, and the support code is unavailable, this class
* still tries its best to be secure, but is vulnerable to some race condition
* attacks.
*
* If security is enabled but the support code is unavailable, throws a
* RuntimeException since we don't want to run insecurely.
*/
static {
boolean shouldBeSecure = UserGroupInformation.isSecurityEnabled();
boolean canBeSecure = NativeIO.isAvailable();
if (!canBeSecure && shouldBeSecure) {
throw new RuntimeException(
"Secure IO is not possible without native code extensions.");
}
// Pre-cache an instance of the raw FileSystem since we sometimes
// do secure IO in a shutdown hook, where this call could fail.
try {
rawFilesystem = FileSystem.getLocal(new Configuration()).getRaw();
} catch (IOException ie) {
throw new RuntimeException(
"Couldn't obtain an instance of RawLocalFileSystem.");
}
// SecureIO just skips security checks in the case that security is
// disabled
skipSecurity = !canBeSecure;
}
private final static boolean skipSecurity;
private final static FileSystem rawFilesystem;
/**
* Open the given File for read access, verifying the expected user/group
* constraints.
* @param f the file that we are trying to open
* @param expectedOwner the expected user owner for the file
* @param expectedGroup the expected group owner for the file
* @throws IOException if an I/O error occurred, or if the user/group does
* not match
*/
public static FileInputStream openForRead(File f, String expectedOwner,
String expectedGroup) throws IOException {
if (skipSecurity) {
// Subject to race conditions but this is the best we can do
FileStatus status =
rawFilesystem.getFileStatus(new Path(f.getAbsolutePath()));
checkStat(f, status.getOwner(), status.getGroup(),
expectedOwner, expectedGroup);
return new FileInputStream(f);
}
FileInputStream fis = new FileInputStream(f);
boolean success = false;
try {
Stat stat = NativeIO.fstat(fis.getFD());
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
expectedGroup);
success = true;
return fis;
} finally {
if (!success) {
fis.close();
}
}
}
private static FileOutputStream insecureCreateForWrite(File f,
int permissions) throws IOException {
// If we can't do real security, do a racy exists check followed by an
// open and chmod
if (f.exists()) {
throw new AlreadyExistsException("File " + f + " already exists");
}
FileOutputStream fos = new FileOutputStream(f);
boolean success = false;
try {
rawFilesystem.setPermission(new Path(f.getAbsolutePath()),
new FsPermission((short)permissions));
success = true;
return fos;
} finally {
if (!success) {
fos.close();
}
}
}
/**
* Open the specified File for write access, ensuring that it does not exist.
* @param f the file that we want to create
* @param permissions the permission bits to set on the created file
*
* @throws AlreadyExistsException if the file already exists
* @throws IOException if any other error occurred
*/
public static FileOutputStream createForWrite(File f, int permissions)
throws IOException {
if (skipSecurity) {
return insecureCreateForWrite(f, permissions);
} else {
// Use the native wrapper around open(2)
try {
FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
permissions);
return new FileOutputStream(fd);
} catch (NativeIOException nioe) {
if (nioe.getErrno() == Errno.EEXIST) {
throw new AlreadyExistsException(nioe);
}
throw nioe;
}
}
}
private static void checkStat(File f, String owner, String group,
String expectedOwner,
String expectedGroup) throws IOException {
if (expectedOwner != null &&
!expectedOwner.equals(owner)) {
throw new IOException(
"Owner '" + owner + "' for path " + f + " did not match " +
"expected owner '" + expectedOwner + "'");
}
if (expectedGroup != null &&
!expectedGroup.equals(group)) {
throw new IOException(
"Group '" + group + "' for path " + f + " did not match " +
"expected group '" + expectedGroup + "'");
}
}
/**
* Signals that an attempt to create a file at a given pathname has failed
* because another file already existed at that path.
*/
public static class AlreadyExistsException extends IOException {
private static final long serialVersionUID = 1L;
public AlreadyExistsException(String msg) {
super(msg);
}
public AlreadyExistsException(Throwable cause) {
super(cause);
}
}
}
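Not part of the patch: a minimal usage sketch of SecureIOUtils as added above; the log path and owner string below are hypothetical placeholders.

// UsageSketch.java (illustrative only)
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.SecureIOUtils;

public class UsageSketch {
  public static void main(String[] args) throws IOException {
    File log = new File("/tmp/userlogs/attempt_001/syslog"); // hypothetical path
    // Create with mode 0600; throws AlreadyExistsException if anything
    // (for example, a planted symlink) already occupies the path.
    FileOutputStream out = SecureIOUtils.createForWrite(log, 0600);
    out.write("log line\n".getBytes("UTF-8"));
    out.close();
    // Re-open for serving: ownership is verified by fstat on the descriptor
    // that is already open, so swapping in a symlink between the check and
    // the open cannot redirect the read.
    FileInputStream in = SecureIOUtils.openForRead(log, "expecteduser", null);
    in.close();
  }
}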

src/java/org/apache/hadoop/io/nativeio/Errno.java

@@ -0,0 +1,60 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.nativeio;
/**
* Enum representing POSIX errno values.
*/
public enum Errno {
EPERM,
ENOENT,
ESRCH,
EINTR,
EIO,
ENXIO,
E2BIG,
ENOEXEC,
EBADF,
ECHILD,
EAGAIN,
ENOMEM,
EACCES,
EFAULT,
ENOTBLK,
EBUSY,
EEXIST,
EXDEV,
ENODEV,
ENOTDIR,
EISDIR,
EINVAL,
ENFILE,
EMFILE,
ENOTTY,
ETXTBSY,
EFBIG,
ENOSPC,
ESPIPE,
EROFS,
EMLINK,
EPIPE,
EDOM,
ERANGE,
UNKNOWN;
}

src/java/org/apache/hadoop/io/nativeio/NativeIO.java

@@ -0,0 +1,126 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.nativeio;
import java.io.FileDescriptor;
import java.io.IOException;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* JNI wrappers for various native IO-related calls not available in Java.
* These functions should generally be used alongside a fallback to another
* more portable mechanism.
*/
public class NativeIO {
// Flags for open() call from bits/fcntl.h
public static final int O_RDONLY = 00;
public static final int O_WRONLY = 01;
public static final int O_RDWR = 02;
public static final int O_CREAT = 0100;
public static final int O_EXCL = 0200;
public static final int O_NOCTTY = 0400;
public static final int O_TRUNC = 01000;
public static final int O_APPEND = 02000;
public static final int O_NONBLOCK = 04000;
public static final int O_SYNC = 010000;
public static final int O_ASYNC = 020000;
public static final int O_FSYNC = O_SYNC;
public static final int O_NDELAY = O_NONBLOCK;
private static final Log LOG = LogFactory.getLog(NativeIO.class);
private static boolean nativeLoaded = false;
static {
if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
initNative();
nativeLoaded = true;
} catch (Throwable t) {
// This can happen if the user has an older version of libhadoop.so
// installed - in this case we can continue without native IO
// after warning
LOG.error("Unable to initialize NativeIO libraries", t);
}
}
}
/**
* Return true if the JNI-based native IO extensions are available.
*/
public static boolean isAvailable() {
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
}
/** Wrapper around open(2) */
public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
/** Wrapper around fstat(2) */
public static native Stat fstat(FileDescriptor fd) throws IOException;
/** Initialize the JNI method ID and class ID cache */
private static native void initNative();
/**
* Result type of the fstat call
*/
public static class Stat {
private String owner, group;
private int mode;
// Mode constants
public static final int S_IFMT = 0170000; /* type of file */
public static final int S_IFIFO = 0010000; /* named pipe (fifo) */
public static final int S_IFCHR = 0020000; /* character special */
public static final int S_IFDIR = 0040000; /* directory */
public static final int S_IFBLK = 0060000; /* block special */
public static final int S_IFREG = 0100000; /* regular */
public static final int S_IFLNK = 0120000; /* symbolic link */
public static final int S_IFSOCK = 0140000; /* socket */
public static final int S_IFWHT = 0160000; /* whiteout */
public static final int S_ISUID = 0004000; /* set user id on execution */
public static final int S_ISGID = 0002000; /* set group id on execution */
public static final int S_ISVTX = 0001000; /* save swapped text even after use */
public static final int S_IRUSR = 0000400; /* read permission, owner */
public static final int S_IWUSR = 0000200; /* write permission, owner */
public static final int S_IXUSR = 0000100; /* execute/search permission, owner */
Stat(String owner, String group, int mode) {
this.owner = owner;
this.group = group;
this.mode = mode;
}
public String toString() {
return "Stat(owner='" + owner + "', group='" + group + "'" +
", mode=" + mode + ")";
}
public String getOwner() {
return owner;
}
public String getGroup() {
return group;
}
public int getMode() {
return mode;
}
}
}
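A hedged sketch of the fallback pattern the class comment recommends: call the JNI wrapper only when isAvailable() reports the native code is loaded, and degrade to the portable java.io path otherwise. The helper class and file name are hypothetical, not part of the patch.

// GuardedCreate.java (illustrative only)
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.nativeio.NativeIO;

public class GuardedCreate {
  public static FileOutputStream create(File f) throws IOException {
    if (NativeIO.isAvailable()) {
      // O_EXCL makes creation atomic at the syscall level: open(2) fails
      // with EEXIST instead of following a pre-planted symlink.
      FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
          NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0644);
      return new FileOutputStream(fd);
    }
    // Portable (but racy) fallback when libhadoop is not loaded.
    return new FileOutputStream(f);
  }
}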

src/java/org/apache/hadoop/io/nativeio/NativeIOException.java

@@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.nativeio;
import java.io.IOException;
/**
* An exception generated by a call to the native IO code.
*
* These exceptions simply wrap <i>errno</i> result codes.
*/
public class NativeIOException extends IOException {
private static final long serialVersionUID = 1L;
private Errno errno;
public NativeIOException(String msg, Errno errno) {
super(msg);
this.errno = errno;
}
public Errno getErrno() {
return errno;
}
public String toString() {
return errno.toString() + ": " + super.getMessage();
}
}
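For illustration only, a sketch of the errno-aware handling this exception type enables: callers can branch on the wrapped Errno value (here ENOENT) instead of parsing message strings. The helper name is hypothetical.

// ErrnoAware.java (illustrative only)
import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import org.apache.hadoop.io.nativeio.Errno;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.io.nativeio.NativeIOException;

public class ErrnoAware {
  /** Returns an open descriptor, or null if the file does not exist. */
  public static FileDescriptor openIfPresent(File f) throws IOException {
    try {
      return NativeIO.open(f.getAbsolutePath(), NativeIO.O_RDONLY, 0);
    } catch (NativeIOException nioe) {
      if (nioe.getErrno() == Errno.ENOENT) {
        return null; // expected: file absent
      }
      throw nioe; // anything else is a genuine error
    }
  }
}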

src/native/Makefile.am

@@ -33,7 +33,8 @@ export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
-              -Isrc/org/apache/hadoop/security
+              -Isrc/org/apache/hadoop/security \
+              -Isrc/org/apache/hadoop/io/nativeio/
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
@@ -41,8 +42,12 @@ lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+                       src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+                       src/org/apache/hadoop/io/nativeio/errno_enum.c \
+                       src/org/apache/hadoop/io/nativeio/NativeIO.c
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm
 #

src/native/Makefile.in

@@ -93,7 +93,8 @@ libLTLIBRARIES_INSTALL = $(INSTALL)
 LTLIBRARIES = $(lib_LTLIBRARIES)
 libhadoop_la_DEPENDENCIES =
 am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \
-	getGroup.lo JniBasedUnixGroupsMapping.lo
+	getGroup.lo JniBasedUnixGroupsMapping.lo file_descriptor.lo \
+	errno_enum.lo NativeIO.lo
 libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
 DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
 depcomp = $(SHELL) $(top_srcdir)/config/depcomp
@@ -222,7 +223,8 @@ sysconfdir = @sysconfdir@
 target_alias = @target_alias@
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
 	-Isrc/org/apache/hadoop/io/compress/zlib \
-	-Isrc/org/apache/hadoop/security
+	-Isrc/org/apache/hadoop/security \
+	-Isrc/org/apache/hadoop/io/nativeio/
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
@@ -230,9 +232,12 @@ lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
 	src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
 	src/org/apache/hadoop/security/getGroup.c \
-	src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+	src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+	src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+	src/org/apache/hadoop/io/nativeio/errno_enum.c \
+	src/org/apache/hadoop/io/nativeio/NativeIO.c
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm
 all: config.h
 	$(MAKE) $(AM_MAKEFLAGS) all-am
@@ -326,8 +331,11 @@ distclean-compile:
 	-rm -f *.tab.c
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/NativeIO.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/errno_enum.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/file_descriptor.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@
 .c.o:
@@ -379,6 +387,27 @@ JniBasedUnixGroupsMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
 @AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+file_descriptor.lo: src/org/apache/hadoop/io/nativeio/file_descriptor.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT file_descriptor.lo -MD -MP -MF "$(DEPDIR)/file_descriptor.Tpo" -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/file_descriptor.Tpo" "$(DEPDIR)/file_descriptor.Plo"; else rm -f "$(DEPDIR)/file_descriptor.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/file_descriptor.c' object='file_descriptor.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c
+errno_enum.lo: src/org/apache/hadoop/io/nativeio/errno_enum.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT errno_enum.lo -MD -MP -MF "$(DEPDIR)/errno_enum.Tpo" -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/errno_enum.Tpo" "$(DEPDIR)/errno_enum.Plo"; else rm -f "$(DEPDIR)/errno_enum.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/errno_enum.c' object='errno_enum.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c
+NativeIO.lo: src/org/apache/hadoop/io/nativeio/NativeIO.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT NativeIO.lo -MD -MP -MF "$(DEPDIR)/NativeIO.Tpo" -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/NativeIO.Tpo" "$(DEPDIR)/NativeIO.Plo"; else rm -f "$(DEPDIR)/NativeIO.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/NativeIO.c' object='NativeIO.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c
 mostlyclean-libtool:
 	-rm -f *.lo

src/native/config.h.in

@@ -3,6 +3,10 @@
 /* The 'actual' dynamic-library for '-lz' */
 #undef HADOOP_ZLIB_LIBRARY
+/* Define to 1 if you have the declaration of `strerror_r', and to 0 if you
+   don't. */
+#undef HAVE_DECL_STRERROR_R
 /* Define to 1 if you have the <dlfcn.h> header file. */
 #undef HAVE_DLFCN_H
@@ -39,6 +43,9 @@
 /* Define to 1 if you have the <stdlib.h> header file. */
 #undef HAVE_STDLIB_H
+/* Define to 1 if you have the `strerror_r' function. */
+#undef HAVE_STRERROR_R
 /* Define to 1 if you have the <strings.h> header file. */
 #undef HAVE_STRINGS_H
@@ -81,8 +88,17 @@
 /* Define to 1 if you have the ANSI C header files. */
 #undef STDC_HEADERS
+/* Define to 1 if strerror_r returns char *. */
+#undef STRERROR_R_CHAR_P
 /* Version number of package */
 #undef VERSION
+/* Number of bits in a file offset, on hosts where this is settable. */
+#undef _FILE_OFFSET_BITS
+/* Define for large files, on AIX-style hosts. */
+#undef _LARGE_FILES
 /* Define to empty if `const' does not conform to ANSI C. */
 #undef const

src/native/configure (vendored): 2297 lines changed; file diff suppressed because it is too large.

src/native/configure.ac

@@ -38,6 +38,7 @@ AC_INIT(src/org_apache_hadoop.h)
 AC_CONFIG_SRCDIR([src/org_apache_hadoop.h])
 AC_CONFIG_AUX_DIR(config)
 AC_CONFIG_HEADER([config.h])
+AC_SYS_LARGEFILE
 AM_INIT_AUTOMAKE(hadoop,1.0.0)
@@ -95,6 +96,9 @@ AC_C_CONST
 # Checks for library functions.
 AC_CHECK_FUNCS([memset])
+# Check for nonstandard STRERROR_R
+AC_FUNC_STRERROR_R
 AC_CONFIG_FILES([Makefile])
 AC_OUTPUT

src/native/lib/Makefile.am

@@ -36,7 +36,7 @@ AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES =
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
 #

src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

@@ -0,0 +1,277 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// get the autoconf settings
#include "config.h"
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <grp.h>
#include <jni.h>
#include <pwd.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_nativeio_NativeIO.h"
#include "file_descriptor.h"
#include "errno_enum.h"
// the NativeIO$Stat inner class and its constructor
static jclass stat_clazz;
static jmethodID stat_ctor;
// the NativeIOException class and its constructor
static jclass nioe_clazz;
static jmethodID nioe_ctor;
// Internal functions
static void throw_ioe(JNIEnv* env, int errnum);
static ssize_t get_pw_buflen();
static void stat_init(JNIEnv *env) {
// Init Stat
jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
PASS_EXCEPTIONS(env);
stat_clazz = (*env)->NewGlobalRef(env, clazz);
stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
"(Ljava/lang/String;Ljava/lang/String;I)V");
}
static void stat_deinit(JNIEnv *env) {
if (stat_clazz != NULL) {
(*env)->DeleteGlobalRef(env, stat_clazz);
stat_clazz = NULL;
}
}
static void nioe_init(JNIEnv *env) {
// Init NativeIOException
nioe_clazz = (*env)->FindClass(
env, "org/apache/hadoop/io/nativeio/NativeIOException");
PASS_EXCEPTIONS(env);
nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
"(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
}
static void nioe_deinit(JNIEnv *env) {
if (nioe_clazz != NULL) {
(*env)->DeleteGlobalRef(env, nioe_clazz);
nioe_clazz = NULL;
}
nioe_ctor = NULL;
}
/*
* private static native void initNative();
*
* We rely on this function rather than lazy initialization because
* the lazy approach may have a race if multiple callers try to
* init at the same time.
*/
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
JNIEnv *env, jclass clazz) {
stat_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
nioe_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
fd_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
errno_enum_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
return;
error:
// these are all idempotent and safe to call even if the
// class wasn't initted yet
stat_deinit(env);
nioe_deinit(env);
fd_deinit(env);
errno_enum_deinit(env);
}
/*
* public static native Stat fstat(FileDescriptor fd);
*/
JNIEXPORT jobject JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
JNIEnv *env, jclass clazz, jobject fd_object)
{
jobject ret = NULL;
char *pw_buf = NULL;
int fd = fd_get(env, fd_object);
PASS_EXCEPTIONS_GOTO(env, cleanup);
struct stat s;
int rc = fstat(fd, &s);
if (rc != 0) {
throw_ioe(env, errno);
goto cleanup;
}
size_t pw_buflen = get_pw_buflen();
if ((pw_buf = malloc(pw_buflen)) == NULL) {
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
goto cleanup;
}
// Grab username
struct passwd pwd, *pwdp;
while ((rc = getpwuid_r(s.st_uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
if (rc != ERANGE) {
throw_ioe(env, rc);
goto cleanup;
}
free(pw_buf);
pw_buflen *= 2;
if ((pw_buf = malloc(pw_buflen)) == NULL) {
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
goto cleanup;
}
}
assert(pwdp == &pwd);
jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
if (jstr_username == NULL) goto cleanup;
// Grab group
struct group grp, *grpp;
while ((rc = getgrgid_r(s.st_gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
if (rc != ERANGE) {
throw_ioe(env, rc);
goto cleanup;
}
free(pw_buf);
pw_buflen *= 2;
if ((pw_buf = malloc(pw_buflen)) == NULL) {
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
goto cleanup;
}
}
assert(grpp == &grp);
jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
PASS_EXCEPTIONS_GOTO(env, cleanup);
// Construct result
ret = (*env)->NewObject(env, stat_clazz, stat_ctor,
jstr_username, jstr_groupname, s.st_mode);
cleanup:
if (pw_buf != NULL) free(pw_buf);
return ret;
}
/*
* public static native FileDescriptor open(String path, int flags, int mode);
*/
JNIEXPORT jobject JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_open(
JNIEnv *env, jclass clazz, jstring j_path,
jint flags, jint mode)
{
jobject ret = NULL;
const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
if (path == NULL) goto cleanup; // JVM throws Exception for us
int fd;
if (flags & O_CREAT) {
fd = open(path, flags, mode);
} else {
fd = open(path, flags);
}
if (fd == -1) {
throw_ioe(env, errno);
goto cleanup;
}
ret = fd_create(env, fd);
cleanup:
if (path != NULL) {
(*env)->ReleaseStringUTFChars(env, j_path, path);
}
return ret;
}
/*
 * Throw a java.io.IOException, generating the message from errno.
*/
static void throw_ioe(JNIEnv* env, int errnum)
{
const char* message;
char buffer[80];
jstring jstr_message;
buffer[0] = 0;
#ifdef STRERROR_R_CHAR_P
// GNU strerror_r
message = strerror_r(errnum, buffer, sizeof(buffer));
assert (message != NULL);
#else
int ret = strerror_r(errnum, buffer, sizeof(buffer));
if (ret == 0) {
message = buffer;
} else {
message = "Unknown error";
}
#endif
jobject errno_obj = errno_to_enum(env, errnum);
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL)
goto err;
jthrowable obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
jstr_message, errno_obj);
if (obj == NULL) goto err;
(*env)->Throw(env, obj);
return;
err:
  // Nothing to release here: jstr_message came from NewStringUTF and is
  // managed by the JVM; the pending exception (if any) is left in place.
  return;
}
/*
 * Determine how big a buffer we need for reentrant getpwuid_r and getgrgid_r
 */
static ssize_t get_pw_buflen() {
  long ret = -1;
#ifdef _SC_GETPW_R_SIZE_MAX
  // sysconf returns -1 when the limit is indeterminate
  ret = sysconf(_SC_GETPW_R_SIZE_MAX);
#endif
  return (ret > 512) ? ret : 512;
}
/**
* vim: sw=2: ts=2: et:
*/

src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c

@@ -0,0 +1,119 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <assert.h>
#include <errno.h>
#include <jni.h>
#include "org_apache_hadoop.h"
typedef struct errno_mapping {
int errno_val;
char *errno_str;
} errno_mapping_t;
// Macro to define structs like {FOO, "FOO"} for each errno value
#define MAPPING(x) {x, #x}
static errno_mapping_t ERRNO_MAPPINGS[] = {
MAPPING(EPERM),
MAPPING(ENOENT),
MAPPING(ESRCH),
MAPPING(EINTR),
MAPPING(EIO),
MAPPING(ENXIO),
MAPPING(E2BIG),
MAPPING(ENOEXEC),
MAPPING(EBADF),
MAPPING(ECHILD),
MAPPING(EAGAIN),
MAPPING(ENOMEM),
MAPPING(EACCES),
MAPPING(EFAULT),
MAPPING(ENOTBLK),
MAPPING(EBUSY),
MAPPING(EEXIST),
MAPPING(EXDEV),
MAPPING(ENODEV),
MAPPING(ENOTDIR),
MAPPING(EISDIR),
MAPPING(EINVAL),
MAPPING(ENFILE),
MAPPING(EMFILE),
MAPPING(ENOTTY),
MAPPING(ETXTBSY),
MAPPING(EFBIG),
MAPPING(ENOSPC),
MAPPING(ESPIPE),
MAPPING(EROFS),
MAPPING(EMLINK),
MAPPING(EPIPE),
MAPPING(EDOM),
MAPPING(ERANGE),
{-1, NULL}
};
static jclass enum_class;
static jmethodID enum_valueOf;
static jclass errno_class;
void errno_enum_init(JNIEnv *env) {
if (enum_class != NULL) return;
enum_class = (*env)->FindClass(env, "java/lang/Enum");
PASS_EXCEPTIONS(env);
enum_class = (*env)->NewGlobalRef(env, enum_class);
enum_valueOf = (*env)->GetStaticMethodID(env, enum_class,
"valueOf", "(Ljava/lang/Class;Ljava/lang/String;)Ljava/lang/Enum;");
PASS_EXCEPTIONS(env);
errno_class = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/Errno");
PASS_EXCEPTIONS(env);
errno_class = (*env)->NewGlobalRef(env, errno_class);
}
void errno_enum_deinit(JNIEnv *env) {
if (enum_class != NULL) {
(*env)->DeleteGlobalRef(env, enum_class);
enum_class = NULL;
}
if (errno_class != NULL) {
(*env)->DeleteGlobalRef(env, errno_class);
errno_class = NULL;
}
enum_valueOf = NULL;
}
static char *errno_to_string(int errnum) {
int i;
for (i = 0; ERRNO_MAPPINGS[i].errno_str != NULL; i++) {
if (ERRNO_MAPPINGS[i].errno_val == errnum)
return ERRNO_MAPPINGS[i].errno_str;
}
return "UNKNOWN";
}
jobject errno_to_enum(JNIEnv *env, int errnum) {
char *str = errno_to_string(errnum);
assert(str != NULL);
jstring jstr = (*env)->NewStringUTF(env, str);
PASS_EXCEPTIONS_RET(env, NULL);
return (*env)->CallStaticObjectMethod(
env, enum_class, enum_valueOf, errno_class, jstr);
}

src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h

@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ERRNO_ENUM_H
#define ERRNO_ENUM_H
#include <jni.h>
void errno_enum_init(JNIEnv *env);
void errno_enum_deinit(JNIEnv *env);
jobject errno_to_enum(JNIEnv *env, int errnum);
#endif

src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c

@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include "file_descriptor.h"
#include "org_apache_hadoop.h"
// class of java.io.FileDescriptor
static jclass fd_class;
// the internal field for the integer fd
static jfieldID fd_descriptor;
// the no-argument constructor
static jmethodID fd_constructor;
void fd_init(JNIEnv* env)
{
if (fd_class != NULL) return; // already initted
fd_class = (*env)->FindClass(env, "java/io/FileDescriptor");
PASS_EXCEPTIONS(env);
fd_class = (*env)->NewGlobalRef(env, fd_class);
fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
PASS_EXCEPTIONS(env);
fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
}
void fd_deinit(JNIEnv *env) {
if (fd_class != NULL) {
(*env)->DeleteGlobalRef(env, fd_class);
fd_class = NULL;
}
fd_descriptor = NULL;
fd_constructor = NULL;
}
/*
* Given an instance 'obj' of java.io.FileDescriptor, return the
* underlying fd, or throw if unavailable
*/
int fd_get(JNIEnv* env, jobject obj) {
return (*env)->GetIntField(env, obj, fd_descriptor);
}
/*
* Create a FileDescriptor object corresponding to the given int fd
*/
jobject fd_create(JNIEnv *env, int fd) {
jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
PASS_EXCEPTIONS_RET(env, NULL);
(*env)->SetIntField(env, obj, fd_descriptor, fd);
return obj;
}

src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h

@@ -0,0 +1,28 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FILE_DESCRIPTOR_H
#define FILE_DESCRIPTOR_H
#include <jni.h>
void fd_init(JNIEnv *env);
void fd_deinit(JNIEnv *env);
int fd_get(JNIEnv* env, jobject obj);
jobject fd_create(JNIEnv *env, int fd);
#endif

src/native/src/org_apache_hadoop.h

@@ -50,6 +50,22 @@
   } \
 }
+/* Helper macro to return if an exception is pending */
+#define PASS_EXCEPTIONS(env) \
+  { \
+    if ((*env)->ExceptionCheck(env)) return; \
+  }
+#define PASS_EXCEPTIONS_GOTO(env, target) \
+  { \
+    if ((*env)->ExceptionCheck(env)) goto target; \
+  }
+#define PASS_EXCEPTIONS_RET(env, ret) \
+  { \
+    if ((*env)->ExceptionCheck(env)) return (ret); \
+  }
 /**
  * A helper function to dlsym a 'symbol' from a given library-handle.
  *

src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java

@@ -0,0 +1,83 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.junit.BeforeClass;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assume.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
public class TestSecureIOUtils {
private static String realOwner, realGroup;
private static final File testFilePath =
new File(System.getProperty("test.build.data"), "TestSecureIOContext");
@BeforeClass
public static void makeTestFile() throws Exception {
FileOutputStream fos = new FileOutputStream(testFilePath);
fos.write("hello".getBytes("UTF-8"));
fos.close();
Configuration conf = new Configuration();
FileSystem rawFS = FileSystem.getLocal(conf).getRaw();
FileStatus stat = rawFS.getFileStatus(
new Path(testFilePath.toString()));
realOwner = stat.getOwner();
realGroup = stat.getGroup();
}
@Test
public void testReadUnrestricted() throws IOException {
SecureIOUtils.openForRead(testFilePath, null, null).close();
}
@Test
public void testReadCorrectlyRestrictedWithSecurity() throws IOException {
SecureIOUtils
.openForRead(testFilePath, realOwner, realGroup).close();
}
@Test(expected=IOException.class)
public void testReadIncorrectlyRestrictedWithSecurity() throws IOException {
SecureIOUtils
.openForRead(testFilePath, "invalidUser", null).close();
fail("Didn't throw expection for wrong ownership!");
}
@Test
public void testCreateForWrite() throws IOException {
try {
SecureIOUtils.createForWrite(testFilePath, 0777);
fail("Was able to create file at " + testFilePath);
} catch (SecureIOUtils.AlreadyExistsException aee) {
// expected
}
}
}

src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java

@@ -0,0 +1,137 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.nativeio;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assume.*;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.NativeCodeLoader;
public class TestNativeIO {
static final Log LOG = LogFactory.getLog(TestNativeIO.class);
static final File TEST_DIR = new File(
System.getProperty("test.build.data"), "testnativeio");
@Before
public void checkLoaded() {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
}
@Before
public void setupTestDir() throws IOException {
FileUtil.fullyDelete(TEST_DIR);
TEST_DIR.mkdirs();
}
@Test
public void testFstat() throws Exception {
FileOutputStream fos = new FileOutputStream(
new File(TEST_DIR, "testfstat"));
NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
fos.close();
LOG.info("Stat: " + String.valueOf(stat));
assertEquals(System.getProperty("user.name"), stat.getOwner());
assertNotNull(stat.getGroup());
assertTrue(!"".equals(stat.getGroup()));
assertEquals("Stat mode field should indicate a regular file",
NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
}
@Test
public void testFstatClosedFd() throws Exception {
FileOutputStream fos = new FileOutputStream(
new File(TEST_DIR, "testfstat2"));
fos.close();
try {
NativeIO.fstat(fos.getFD());
fail("Was able to fstat a closed file descriptor");
} catch (NativeIOException nioe) {
LOG.info("Got expected exception", nioe);
assertEquals(Errno.EBADF, nioe.getErrno());
}
}
@Test
public void testOpenMissingWithoutCreate() throws Exception {
LOG.info("Open a missing file without O_CREAT and it should fail");
try {
FileDescriptor fd = NativeIO.open(
new File(TEST_DIR, "doesntexist").getAbsolutePath(),
NativeIO.O_WRONLY, 0700);
fail("Able to open a new file without O_CREAT");
} catch (NativeIOException nioe) {
LOG.info("Got expected exception", nioe);
assertEquals(Errno.ENOENT, nioe.getErrno());
}
}
@Test
public void testOpenWithCreate() throws Exception {
LOG.info("Test creating a file with O_CREAT");
FileDescriptor fd = NativeIO.open(
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
assertNotNull(fd);
assertTrue(fd.valid());
FileOutputStream fos = new FileOutputStream(fd);
fos.write("foo".getBytes());
fos.close();
assertFalse(fd.valid());
LOG.info("Test exclusive create");
try {
fd = NativeIO.open(
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
fail("Was able to create existing file with O_EXCL");
} catch (NativeIOException nioe) {
LOG.info("Got expected exception for failed exclusive create", nioe);
assertEquals(Errno.EEXIST, nioe.getErrno());
}
}
/**
* Test that opens and closes a file 10000 times - this would crash with
* "Too many open files" if we leaked fds using this access pattern.
*/
@Test
public void testFDDoesntLeak() throws IOException {
for (int i = 0; i < 10000; i++) {
FileDescriptor fd = NativeIO.open(
new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
assertNotNull(fd);
assertTrue(fd.valid());
FileOutputStream fos = new FileOutputStream(fd);
fos.write("foo".getBytes());
fos.close();
}
}
}