HADOOP-6818. Provides a JNI implementation of group resolution. Contributed by Devaraj Das.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1030646 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 2010-11-03 20:20:33 +00:00
parent 7daf9a3d3d
commit dfe57e0b1a
13 changed files with 4188 additions and 3756 deletions

CHANGES.txt

@@ -158,6 +158,8 @@ Trunk (unreleased changes)
HADOOP-7008. Enable test-patch.sh to have a configured number of acceptable
findbugs and javadoc warnings. (nigel and gkesavan)
HADOOP-6818. Provides a JNI implementation of group resolution. (ddas)
OPTIMIZATIONS
HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).

build.xml

@@ -365,6 +365,7 @@
<mkdir dir="${build.native}/lib"/>
<mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
<mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
<javah
classpath="${build.classes}"
@@ -376,6 +377,15 @@
<class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
</javah>
<javah
classpath="${build.classes}"
destdir="${build.native}/src/org/apache/hadoop/security"
force="yes"
verbose="yes"
>
<class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
</javah>
<exec dir="${build.native}" executable="sh" failonerror="true">
<env key="OS_NAME" value="${os.name}"/>
<env key="OS_ARCH" value="${os.arch}"/>
@@ -392,7 +402,7 @@
</exec>
<exec dir="${build.native}" executable="sh" failonerror="true">
<arg line="${build.native}/libtool --mode=install cp ${build.native}/lib/libhadoop.la ${build.native}/lib"/>
<arg line="${build.native}/libtool --mode=install cp ${build.native}/libhadoop.la ${build.native}/lib"/>
</exec>
</target>

src/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java

@@ -0,0 +1,60 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.NativeCodeLoader;
/**
* A JNI-based implementation of {@link GroupMappingServiceProvider}
* that invokes libC calls to get the group
* memberships of a given user.
*/
public class JniBasedUnixGroupsMapping implements GroupMappingServiceProvider {
private static final Log LOG =
    LogFactory.getLog(JniBasedUnixGroupsMapping.class);
native String[] getGroupForUser(String user);
static {
if (!NativeCodeLoader.isNativeCodeLoaded()) {
throw new RuntimeException("Bailing out since native library couldn't " +
"be loaded");
}
LOG.info("Using JniBasedUnixGroupsMapping for Group resolution");
}
@Override
public List<String> getGroups(String user) throws IOException {
String[] groups = new String[0];
try {
groups = getGroupForUser(user);
} catch (Exception e) {
  LOG.warn("Got exception while trying to obtain the groups for user "
      + user, e);
}
}
return Arrays.asList(groups);
}
}
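For readers wiring this up by hand, a minimal usage sketch follows. The GroupLookupDemo class name and its argument handling are illustrative only, not part of this patch; it assumes the freshly built libhadoop is on java.library.path, since the static initializer above bails out when the native library cannot be loaded.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.security.GroupMappingServiceProvider;
import org.apache.hadoop.security.JniBasedUnixGroupsMapping;

public class GroupLookupDemo {
  public static void main(String[] args) throws IOException {
    // Resolve groups for the named user (or the current user) through
    // the JNI-backed provider added by this patch.
    String user = args.length > 0 ? args[0] : System.getProperty("user.name");
    GroupMappingServiceProvider provider = new JniBasedUnixGroupsMapping();
    List<String> groups = provider.getGroups(user);
    System.out.println(user + ": " + groups);
  }
}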

src/native/Makefile.am

@@ -16,10 +16,6 @@
# limitations under the License.
#
#
# Top-level makefile template for native hadoop code
#
#
# Notes:
# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}.
@@ -35,11 +31,19 @@
# Export $(PLATFORM) to prevent proliferation of sub-shells
export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
# List the sub-directories here
SUBDIRS = src/org/apache/hadoop/io/compress/zlib lib
AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
-Isrc/org/apache/hadoop/io/compress/zlib \
-Isrc/org/apache/hadoop/security
AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
# The following export is needed to build libhadoop.so in the 'lib' directory
export SUBDIRS
lib_LTLIBRARIES = libhadoop.la
libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
src/org/apache/hadoop/security/getGroup.c \
src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
libhadoop_la_LDFLAGS = -version-info 1:0:0
libhadoop_la_LIBADD = -ldl -ljvm
#
#vim: sw=4: ts=4: noet

src/native/Makefile.in

@@ -1,8 +1,8 @@
# Makefile.in generated by automake 1.9.2 from Makefile.am.
# Makefile.in generated by automake 1.9.6 from Makefile.am.
# @configure_input@
# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
# 2003, 2004 Free Software Foundation, Inc.
# 2003, 2004, 2005 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
@@ -32,10 +32,6 @@
# limitations under the License.
#
#
# Top-level makefile template for native hadoop code
#
#
# Notes:
# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}.
@@ -47,6 +43,7 @@
# * OS_ARCH
# All these are setup by build.xml.
#
srcdir = @srcdir@
top_srcdir = @top_srcdir@
VPATH = @srcdir@
@@ -85,17 +82,34 @@ am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = config.h
CONFIG_CLEAN_FILES =
SOURCES =
DIST_SOURCES =
RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \
html-recursive info-recursive install-data-recursive \
install-exec-recursive install-info-recursive \
install-recursive installcheck-recursive installdirs-recursive \
pdf-recursive ps-recursive uninstall-info-recursive \
uninstall-recursive
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = `echo $$p | sed -e 's|^.*/||'`;
am__installdirs = "$(DESTDIR)$(libdir)"
libLTLIBRARIES_INSTALL = $(INSTALL)
LTLIBRARIES = $(lib_LTLIBRARIES)
libhadoop_la_DEPENDENCIES =
am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \
getGroup.lo JniBasedUnixGroupsMapping.lo
libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
depcomp = $(SHELL) $(top_srcdir)/config/depcomp
am__depfiles_maybe = depfiles
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
CCLD = $(CC)
LINK = $(LIBTOOL) --tag=CC --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
SOURCES = $(libhadoop_la_SOURCES)
DIST_SOURCES = $(libhadoop_la_SOURCES)
ETAGS = etags
CTAGS = ctags
DIST_SUBDIRS = $(SUBDIRS)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
distdir = $(PACKAGE)-$(VERSION)
top_distdir = $(distdir)
@@ -158,6 +172,7 @@ PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
@@ -205,13 +220,25 @@ sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
-Isrc/org/apache/hadoop/io/compress/zlib \
-Isrc/org/apache/hadoop/security
# List the sub-directories here
SUBDIRS = src/org/apache/hadoop/io/compress/zlib lib
AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
lib_LTLIBRARIES = libhadoop.la
libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
src/org/apache/hadoop/security/getGroup.c \
src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
libhadoop_la_LDFLAGS = -version-info 1:0:0
libhadoop_la_LIBADD = -ldl -ljvm
all: config.h
$(MAKE) $(AM_MAKEFLAGS) all-recursive
$(MAKE) $(AM_MAKEFLAGS) all-am
.SUFFIXES:
.SUFFIXES: .c .lo .o .obj
am--refresh:
@:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@@ -262,6 +289,95 @@ $(srcdir)/config.h.in: $(am__configure_deps)
distclean-hdr:
-rm -f config.h stamp-h1
install-libLTLIBRARIES: $(lib_LTLIBRARIES)
@$(NORMAL_INSTALL)
test -z "$(libdir)" || $(mkdir_p) "$(DESTDIR)$(libdir)"
@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
if test -f $$p; then \
f=$(am__strip_dir) \
echo " $(LIBTOOL) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \
$(LIBTOOL) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) "$$p" "$(DESTDIR)$(libdir)/$$f"; \
else :; fi; \
done
uninstall-libLTLIBRARIES:
@$(NORMAL_UNINSTALL)
@set -x; list='$(lib_LTLIBRARIES)'; for p in $$list; do \
p=$(am__strip_dir) \
echo " $(LIBTOOL) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$p'"; \
$(LIBTOOL) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$p"; \
done
clean-libLTLIBRARIES:
-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
test "$$dir" != "$$p" || dir=.; \
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
libhadoop.la: $(libhadoop_la_OBJECTS) $(libhadoop_la_DEPENDENCIES)
$(LINK) -rpath $(libdir) $(libhadoop_la_LDFLAGS) $(libhadoop_la_OBJECTS) $(libhadoop_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@
.c.o:
@am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(COMPILE) -c $<
.c.obj:
@am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Po"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'`
.c.lo:
@am__fastdepCC_TRUE@ if $(LTCOMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/$*.Tpo" "$(DEPDIR)/$*.Plo"; else rm -f "$(DEPDIR)/$*.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(LTCOMPILE) -c -o $@ $<
ZlibCompressor.lo: src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT ZlibCompressor.lo -MD -MP -MF "$(DEPDIR)/ZlibCompressor.Tpo" -c -o ZlibCompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/ZlibCompressor.Tpo" "$(DEPDIR)/ZlibCompressor.Plo"; else rm -f "$(DEPDIR)/ZlibCompressor.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c' object='ZlibCompressor.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o ZlibCompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
ZlibDecompressor.lo: src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT ZlibDecompressor.lo -MD -MP -MF "$(DEPDIR)/ZlibDecompressor.Tpo" -c -o ZlibDecompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/ZlibDecompressor.Tpo" "$(DEPDIR)/ZlibDecompressor.Plo"; else rm -f "$(DEPDIR)/ZlibDecompressor.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' object='ZlibDecompressor.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o ZlibDecompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
getGroup.lo: src/org/apache/hadoop/security/getGroup.c
@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT getGroup.lo -MD -MP -MF "$(DEPDIR)/getGroup.Tpo" -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/getGroup.Tpo" "$(DEPDIR)/getGroup.Plo"; else rm -f "$(DEPDIR)/getGroup.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/getGroup.c' object='getGroup.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c
JniBasedUnixGroupsMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c; \
@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo"; exit 1; fi
@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' object='JniBasedUnixGroupsMapping.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
mostlyclean-libtool:
-rm -f *.lo
@@ -273,65 +389,6 @@ distclean-libtool:
-rm -f libtool
uninstall-info-am:
# This directory's subdirectories are mostly independent; you can cd
# into them and run `make' without going through this Makefile.
# To change the values of `make' variables: instead of editing Makefiles,
# (1) if the variable is set in `config.status', edit `config.status'
# (which will cause the Makefiles to be regenerated when you run `make');
# (2) otherwise, pass the desired values on the `make' command line.
$(RECURSIVE_TARGETS):
@set fnord $$MAKEFLAGS; amf=$$2; \
dot_seen=no; \
target=`echo $@ | sed s/-recursive//`; \
list='$(SUBDIRS)'; for subdir in $$list; do \
echo "Making $$target in $$subdir"; \
if test "$$subdir" = "."; then \
dot_seen=yes; \
local_target="$$target-am"; \
else \
local_target="$$target"; \
fi; \
(cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
|| case "$$amf" in *=*) exit 1;; *k*) fail=yes;; *) exit 1;; esac; \
done; \
if test "$$dot_seen" = "no"; then \
$(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
fi; test -z "$$fail"
mostlyclean-recursive clean-recursive distclean-recursive \
maintainer-clean-recursive:
@set fnord $$MAKEFLAGS; amf=$$2; \
dot_seen=no; \
case "$@" in \
distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
*) list='$(SUBDIRS)' ;; \
esac; \
rev=''; for subdir in $$list; do \
if test "$$subdir" = "."; then :; else \
rev="$$subdir $$rev"; \
fi; \
done; \
rev="$$rev ."; \
target=`echo $@ | sed s/-recursive//`; \
for subdir in $$rev; do \
echo "Making $$target in $$subdir"; \
if test "$$subdir" = "."; then \
local_target="$$target-am"; \
else \
local_target="$$target"; \
fi; \
(cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
|| case "$$amf" in *=*) exit 1;; *k*) fail=yes;; *) exit 1;; esac; \
done && test -z "$$fail"
tags-recursive:
list='$(SUBDIRS)'; for subdir in $$list; do \
test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \
done
ctags-recursive:
list='$(SUBDIRS)'; for subdir in $$list; do \
test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \
done
ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
@@ -342,23 +399,10 @@ ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
mkid -fID $$unique
tags: TAGS
TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
tags=; \
here=`pwd`; \
if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
include_option=--etags-include; \
empty_fix=.; \
else \
include_option=--include; \
empty_fix=; \
fi; \
list='$(SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
test ! -f $$subdir/TAGS || \
tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \
fi; \
done; \
list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
@@ -371,7 +415,7 @@ TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
$$tags $$unique; \
fi
ctags: CTAGS
CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
CTAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
tags=; \
here=`pwd`; \
@@ -423,21 +467,6 @@ distdir: $(DISTFILES)
|| exit 1; \
fi; \
done
list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
test -d "$(distdir)/$$subdir" \
|| $(mkdir_p) "$(distdir)/$$subdir" \
|| exit 1; \
distdir=`$(am__cd) $(distdir) && pwd`; \
top_distdir=`$(am__cd) $(top_distdir) && pwd`; \
(cd $$subdir && \
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$$top_distdir" \
distdir="$$distdir/$$subdir" \
distdir) \
|| exit 1; \
fi; \
done
-find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \
! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
! -type d ! -perm -400 -exec chmod a+r {} \; -o \
@@ -536,19 +565,21 @@ distcleancheck: distclean
$(distcleancheck_listfiles) ; \
exit 1; } >&2
check-am: all-am
check: check-recursive
all-am: Makefile config.h
installdirs: installdirs-recursive
installdirs-am:
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
uninstall: uninstall-recursive
check: check-am
all-am: Makefile $(LTLIBRARIES) config.h
installdirs:
for dir in "$(DESTDIR)$(libdir)"; do \
test -z "$$dir" || $(mkdir_p) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-recursive
installcheck: installcheck-am
install-strip:
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
@@ -564,80 +595,79 @@ distclean-generic:
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-recursive
clean: clean-am
clean-am: clean-generic clean-libtool mostlyclean-am
clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \
mostlyclean-am
distclean: distclean-recursive
distclean: distclean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-hdr \
distclean-libtool distclean-tags
distclean-am: clean-am distclean-compile distclean-generic \
distclean-hdr distclean-libtool distclean-tags
dvi: dvi-recursive
dvi: dvi-am
dvi-am:
html: html-recursive
html: html-am
info: info-recursive
info: info-am
info-am:
install-data-am:
install-exec-am:
install-exec-am: install-libLTLIBRARIES
install-info: install-info-recursive
install-info: install-info-am
install-man:
installcheck-am:
maintainer-clean: maintainer-clean-recursive
maintainer-clean: maintainer-clean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf $(top_srcdir)/autom4te.cache
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-recursive
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-recursive
pdf: pdf-am
pdf-am:
ps: ps-recursive
ps: ps-am
ps-am:
uninstall-am: uninstall-info-am
uninstall-am: uninstall-info-am uninstall-libLTLIBRARIES
uninstall-info: uninstall-info-recursive
.PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am am--refresh check \
check-am clean clean-generic clean-libtool clean-recursive \
ctags ctags-recursive dist dist-all dist-bzip2 dist-gzip \
dist-shar dist-tarZ dist-zip distcheck distclean \
distclean-generic distclean-hdr distclean-libtool \
distclean-recursive distclean-tags distcleancheck distdir \
distuninstallcheck dvi dvi-am html html-am info info-am \
install install-am install-data install-data-am install-exec \
install-exec-am install-info install-info-am install-man \
install-strip installcheck installcheck-am installdirs \
installdirs-am maintainer-clean maintainer-clean-generic \
maintainer-clean-recursive mostlyclean mostlyclean-generic \
mostlyclean-libtool mostlyclean-recursive pdf pdf-am ps ps-am \
tags tags-recursive uninstall uninstall-am uninstall-info-am
.PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \
clean-generic clean-libLTLIBRARIES clean-libtool ctags dist \
dist-all dist-bzip2 dist-gzip dist-shar dist-tarZ dist-zip \
distcheck distclean distclean-compile distclean-generic \
distclean-hdr distclean-libtool distclean-tags distcleancheck \
distdir distuninstallcheck dvi dvi-am html html-am info \
info-am install install-am install-data install-data-am \
install-exec install-exec-am install-info install-info-am \
install-libLTLIBRARIES install-man install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags uninstall uninstall-am uninstall-info-am \
uninstall-libLTLIBRARIES
# Export $(PLATFORM) to prevent proliferation of sub-shells
export PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
# The following export is needed to build libhadoop.so in the 'lib' directory
export SUBDIRS
#
#vim: sw=4: ts=4: noet
#

src/native/aclocal.m4 vendored

File diff suppressed because it is too large.

src/native/config.h.in

@@ -6,6 +6,9 @@
/* Define to 1 if you have the <dlfcn.h> header file. */
#undef HAVE_DLFCN_H
/* Define to 1 if you have the <fcntl.h> header file. */
#undef HAVE_FCNTL_H
/* Define to 1 if you have the <inttypes.h> header file. */
#undef HAVE_INTTYPES_H

src/native/configure vendored

File diff suppressed because it is too large.

src/native/configure.ac

@@ -86,15 +86,16 @@ AC_SUBST([JNI_CPPFLAGS])
dnl Check for zlib headers
AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
dnl Check for headers needed by the native Group resolution implementation
AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
# Checks for typedefs, structures, and compiler characteristics.
AC_C_CONST
# Checks for library functions.
AC_CHECK_FUNCS([memset])
AC_CONFIG_FILES([Makefile
src/org/apache/hadoop/io/compress/zlib/Makefile
lib/Makefile])
AC_CONFIG_FILES([Makefile])
AC_OUTPUT
#

src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c

@@ -0,0 +1,117 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <sys/types.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdlib.h>
#include <errno.h>
#include <grp.h>
#include <stdio.h>
#include <pwd.h>
#include <string.h>
#include "org_apache_hadoop_security_JniBasedUnixGroupsMapping.h"
#include "org_apache_hadoop.h"
static jobjectArray emptyGroups = NULL;
JNIEXPORT jobjectArray JNICALL
Java_org_apache_hadoop_security_JniBasedUnixGroupsMapping_getGroupForUser
(JNIEnv *env, jobject jobj, jstring juser) {
extern int getGroupIDList(const char *user, int *ngroups, gid_t **groups);
extern int getGroupDetails(gid_t group, char **grpBuf);
jobjectArray jgroups = NULL;
int error = -1;
/* Declare and initialize everything the cleanup path touches before the
   first goto, so an early failure never frees or releases an
   uninitialized pointer. */
char *grpBuf = NULL;
const char *cuser = NULL;
gid_t *groups = NULL;
int ngroups = 0;
if (emptyGroups == NULL) {
  jobjectArray lEmptyGroups = (jobjectArray)(*env)->NewObjectArray(env, 0,
      (*env)->FindClass(env, "java/lang/String"), NULL);
  if (lEmptyGroups == NULL) {
    goto cleanup;
  }
  emptyGroups = (*env)->NewGlobalRef(env, lEmptyGroups);
  if (emptyGroups == NULL) {
    goto cleanup;
  }
}
cuser = (*env)->GetStringUTFChars(env, juser, NULL);
if (cuser == NULL) {
  goto cleanup;
}
/*Get the number of the groups, and their IDs, this user belongs to*/
error = getGroupIDList(cuser, &ngroups, &groups);
if (error != 0) {
goto cleanup;
}
jgroups = (jobjectArray)(*env)->NewObjectArray(env, ngroups,
(*env)->FindClass(env, "java/lang/String"), NULL);
if (jgroups == NULL) {
error = -1;
goto cleanup;
}
/*Iterate over the groupIDs and get the group structure for each*/
int i = 0;
for (i = 0; i < ngroups; i++) {
error = getGroupDetails(groups[i],&grpBuf);
if (error != 0) {
goto cleanup;
}
jstring jgrp = (*env)->NewStringUTF(env, ((struct group*)grpBuf)->gr_name);
if (jgrp == NULL) {
error = -1;
goto cleanup;
}
(*env)->SetObjectArrayElement(env, jgroups,i,jgrp);
free(grpBuf);
grpBuf = NULL;
}
cleanup:
if (error == ENOMEM) {
THROW(env, "java/lang/OutOfMemoryError", NULL);
}
if (error == ENOENT) {
THROW(env, "java/io/IOException", "No entry for user");
}
if (groups != NULL) {
free(groups);
}
if (grpBuf != NULL) {
free(grpBuf);
}
if (cuser != NULL) {
(*env)->ReleaseStringUTFChars(env, juser, cuser);
}
if (error == 0) {
return jgroups;
} else {
return emptyGroups;
}
}

src/native/src/org/apache/hadoop/security/getGroup.c

@@ -0,0 +1,189 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <grp.h>
#include <stdio.h>
#include <unistd.h>
#include <pwd.h>
#include <errno.h>
#include <string.h>
#include <stdlib.h>
/*Helper functions for the JNI implementation of unix group mapping service*/
/**
* Gets the group IDs for a given user. The groups argument is allocated
* internally, and it contains the list of groups. The ngroups is updated to
* the number of groups
* Returns 0 on success (on success, the caller must free the memory allocated
* internally)
*/
int getGroupIDList(const char *user, int *ngroups, gid_t **groups) {
int getPW(const char *user, char **pwbuf);
*ngroups = 0;
char *pwbuf = NULL;
*groups = NULL;
/*Look up the password database first*/
int error = getPW(user, &pwbuf);
if (error != 0) {
if (pwbuf != NULL) {
free(pwbuf);
}
return error;
}
struct passwd *pw = (struct passwd*)pwbuf;
int ng = 0;
/*Get the groupIDs that this user belongs to*/
if (getgrouplist(user, pw->pw_gid, NULL, &ng) < 0) {
*ngroups = ng;
*groups = (gid_t *) malloc(ng * sizeof (gid_t));
if (!*groups) {
*ngroups = 0;
free(pwbuf);
return ENOMEM;
}
if (getgrouplist(user, pw->pw_gid, *groups, &ng) < 0) {
*ngroups = 0;
free(pwbuf);
free(*groups);
*groups = NULL;
return ENOENT;
}
}
free(pwbuf);
return 0;
}
/**
* Gets the group structure for a given group ID.
* The grpBuf argument is allocated internally and it contains the
* struct group for the given group ID.
* Returns 0 on success (on success, the caller must free the memory allocated
* internally)
*/
int getGroupDetails(gid_t group, char **grpBuf) {
struct group * grp = NULL;
/* sysconf may return -1 when the limit is indeterminate; assigning that
   to a size_t would wrap, so clamp through a signed type first. */
long confSize = sysconf(_SC_GETGR_R_SIZE_MAX);
size_t currBufferSize = (confSize > 1024) ? (size_t)confSize : 1024;
*grpBuf = NULL;
char *buf = (char*)malloc(sizeof(char) * currBufferSize);
if (!buf) {
return ENOMEM;
}
int error;
for (;;) {
error = getgrgid_r(group, (struct group*)buf,
buf + sizeof(struct group),
currBufferSize - sizeof(struct group), &grp);
if (error != ERANGE) {
  break;
}
free(buf);
currBufferSize *= 2;
buf = malloc(sizeof(char) * currBufferSize);
if (!buf) {
  return ENOMEM;
}
}
if (!grp && !error) {
free(buf);
return ENOENT;
} else if (error) {
free(buf);
return error;
}
*grpBuf = buf;
return 0;
}
/**
* Gets the password database entry for a given user.
* The pwbuf argument is allocated internally and it contains the
* broken out fields for the password database entry
* Returns 0 on success (on success, the caller must free the memory allocated
* internally).
*/
int getPW(const char *user, char **pwbuf) {
struct passwd *pwbufp = NULL;
/* Same clamping as in getGroupDetails: sysconf may return -1. */
long confSize = sysconf(_SC_GETPW_R_SIZE_MAX);
size_t currBufferSize = (confSize > 1024) ? (size_t)confSize : 1024;
*pwbuf = NULL;
char *buf = (char*)malloc(sizeof(char) * currBufferSize);
if (!buf) {
return ENOMEM;
}
int error;
for (;;) {
error = getpwnam_r(user, (struct passwd*)buf, buf + sizeof(struct passwd),
currBufferSize - sizeof(struct passwd), &pwbufp);
if (error != ERANGE) {
break;
}
free(buf);
currBufferSize *= 2;
buf = (char*)malloc(sizeof(char) * currBufferSize);
if (!buf) {
return ENOMEM;
}
}
if (!pwbufp && !error) {
free(buf);
return ENOENT;
} else if (error) {
free(buf);
return error;
}
*pwbuf = buf;
return 0;
}
#undef TESTING
#ifdef TESTING
/**
* A main() is provided so that quick testing of this
* library can be done.
*/
int main(int argc, char **argv) {
int ngroups;
gid_t *groups = NULL;
const char *user = "ddas";
if (argc == 2) user = argv[1];
int error = getGroupIDList(user, &ngroups, &groups);
if (error != 0) {
  printf("Couldn't obtain grp for user %s\n", user);
  return error;
}
int i;
for (i = 0; i < ngroups; i++) {
char *grpbuf = NULL;
error = getGroupDetails(groups[i], &grpbuf);
if (error != 0 || grpbuf == NULL) {
  continue; /* skip groups that could not be resolved */
}
printf("grps[%d]: %s ", i, ((struct group*)grpbuf)->gr_name);
free(grpbuf);
}
free(groups);
return 0;
}
#endif

src/test/core/org/apache/hadoop/security/TestJNIGroupsMapping.java

@@ -0,0 +1,76 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import static org.junit.Assume.assumeTrue;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.util.NativeCodeLoader;
import org.junit.Before;
import org.junit.Test;
public class TestJNIGroupsMapping {
@Before
public void isNativeCodeLoaded() {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
}
@Test
public void testJNIGroupsMapping() throws Exception {
//for the user running the test, check whether the
//ShellBasedUnixGroupsMapping and the JniBasedUnixGroupsMapping
//return the same groups
String user = UserGroupInformation.getCurrentUser().getShortUserName();
testForUser(user);
//check for a dummy non-existent user (both the implementations should
//return an empty list)
testForUser("fooBarBaz1234DoesNotExist");
}
private void testForUser(String user) throws Exception {
GroupMappingServiceProvider g = new ShellBasedUnixGroupsMapping();
List<String> shellBasedGroups = g.getGroups(user);
g = new JniBasedUnixGroupsMapping();
List<String> jniBasedGroups = g.getGroups(user);
String[] shellBasedGroupsArray = shellBasedGroups.toArray(new String[0]);
Arrays.sort(shellBasedGroupsArray);
String[] jniBasedGroupsArray = jniBasedGroups.toArray(new String[0]);
Arrays.sort(jniBasedGroupsArray);
if (!Arrays.equals(shellBasedGroupsArray, jniBasedGroupsArray)) {
fail("Groups returned by " +
ShellBasedUnixGroupsMapping.class.getCanonicalName() +
" and " +
JniBasedUnixGroupsMapping.class.getCanonicalName() +
" didn't match for " + user);
}
}
}
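A closing note on deployment: a cluster would normally select this provider through Hadoop's group-mapping configuration rather than instantiating it directly, as the test above does. The sketch below assumes the hadoop.security.group.mapping key and the Groups.getUserToGroupsMappingService(Configuration) factory, neither of which is touched by this diff; treat it as illustrative wiring, not part of the patch.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;

public class JniGroupsConfigDemo {
  public static void main(String[] args) throws IOException {
    // Point the shared Groups service at the JNI-based implementation;
    // class loading fails fast if libhadoop is not available.
    Configuration conf = new Configuration();
    conf.set("hadoop.security.group.mapping",
        "org.apache.hadoop.security.JniBasedUnixGroupsMapping");
    Groups groups = Groups.getUserToGroupsMappingService(conf);
    List<String> result = groups.getGroups(
        args.length > 0 ? args[0] : System.getProperty("user.name"));
    System.out.println(result);
  }
}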