Merge trunk into HA branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1162279 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon 2011-08-26 22:46:17 +00:00
commit 5116ea4487
4868 changed files with 16562 additions and 3918 deletions

View File

@@ -15,12 +15,13 @@ Requirements:
----------------------------------------------------------------------------------
Maven modules:
hadoop (Main Hadoop project)
- hadoop-project (Parent POM for all Hadoop Maven modules. )
(All plugins & dependencies versions are defined here.)
- hadoop-annotations (Generates the Hadoop doclet used to generate the Javadocs)
- hadoop-common (Hadoop Common)
- hadoop-hdfs (Hadoop HDFS)
hadoop (Main Hadoop project)
- hadoop-project (Parent POM for all Hadoop Maven modules. )
(All plugins & dependencies versions are defined here.)
- hadoop-project-dist (Parent POM for modules that generate distributions.)
- hadoop-annotations (Generates the Hadoop doclet used to generate the Javadocs)
- hadoop-common (Hadoop Common)
- hadoop-hdfs (Hadoop HDFS)
----------------------------------------------------------------------------------
Where to run Maven from?
@@ -43,15 +44,16 @@ Maven build goals:
* Run clover : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
* Run Rat : mvn apache-rat:check
* Build javadocs : mvn javadoc:javadoc
* Build TAR : mvn package [-Ptar][-Pbintar][-Pdocs][-Psrc][-Pnative]
* Build distribution : mvn package [-Pdist][-Pdocs][-Psrc][-Pnative][-Dtar]
Build options:
* Use -Pnative to compile/bundle native code
* Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
Snappy JNI bindings and to bundle Snappy SO files
* Use -Pdocs to generate & bundle the documentation in the TAR (using -Ptar)
* Use -Psrc to bundle the source in the TAR (using -Ptar)
* Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
* Use -Psrc to bundle the source in the distribution (using -Pdist)
* Use -Dtar to create a TAR with the distribution (using -Pdist)
Tests options:

View File

@@ -56,18 +56,18 @@ fi
PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
# if we are at the project root then nothing more to do
if [[ -d hadoop-common ]]; then
if [[ -d hadoop-common-project ]]; then
echo Looks like this is being run at project root
# if all of the lines start with hadoop-common/, hadoop-hdfs/, or mapreduce/, this is
# if all of the lines start with hadoop-common/, hadoop-hdfs/, or hadoop-mapreduce/, this is
# relative to the hadoop root instead of the subproject root, so we need
# to chop off another layer
elif [[ "$PREFIX_DIRS" =~ ^(hadoop-hdfs|hadoop-common|mapreduce)$ ]]; then
elif [[ "$PREFIX_DIRS" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
echo Looks like this is relative to project root. Increasing PLEVEL
PLEVEL=$[$PLEVEL + 1]
elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hadoop-hdfs\|mapreduce' ; then
elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
echo Looks like this is a cross-subproject patch. Try applying from the project root
exit 1
fi

View File

@@ -370,8 +370,8 @@ checkJavadocWarnings () {
if [ -d hadoop-project ]; then
(cd hadoop-project; $MVN install)
fi
if [ -d hadoop-annotations ]; then
(cd hadoop-annotations; $MVN install)
if [ -d hadoop-common-project/hadoop-annotations ]; then
(cd hadoop-common-project/hadoop-annotations; $MVN install)
fi
$MVN clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | $AWK '/Javadoc Warnings/,EOF' | $GREP warning | $AWK 'BEGIN {total = 0} {total += 1} END {print total}'`
@@ -446,7 +446,7 @@ checkReleaseAuditWarnings () {
echo ""
echo "$MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1"
$MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
find . -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
find $BASEDIR -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
### Compare trunk and patch release audit warning numbers
if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
@@ -520,8 +520,8 @@ checkFindbugsWarnings () {
echo "======================================================================"
echo ""
echo ""
echo "$MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess"
$MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess
echo "$MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess"
$MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess < /dev/null
if [ $? != 0 ] ; then
JIRA_COMMENT="$JIRA_COMMENT
@@ -536,6 +536,7 @@ checkFindbugsWarnings () {
relative_file=${file#$BASEDIR/} # strip leading $BASEDIR prefix
if [ ! $relative_file == "target/findbugsXml.xml" ]; then
module_suffix=${relative_file%/target/findbugsXml.xml} # strip trailing path
module_suffix=`basename ${module_suffix}`
fi
cp $file $PATCH_DIR/patchFindbugsWarnings${module_suffix}.xml
@@ -549,7 +550,7 @@ checkFindbugsWarnings () {
$FINDBUGS_HOME/bin/convertXmlToText -html \
$PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.xml \
$PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.html
JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings${module_suffix}.html
JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/patchprocess/newPatchFindbugsWarnings${module_suffix}.html
$JIRA_COMMENT_FOOTER"
done
@@ -567,42 +568,83 @@ $JIRA_COMMENT_FOOTER"
}
###############################################################################
### Run the test-core target
runCoreTests () {
### Run the tests
runTests () {
echo ""
echo ""
echo "======================================================================"
echo "======================================================================"
echo " Running core tests."
echo " Running tests."
echo "======================================================================"
echo "======================================================================"
echo ""
echo ""
### Kill any rogue build processes from the last attempt
$PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
PreTestTarget=""
if [[ $defect == MAPREDUCE-* ]] ; then
PreTestTarget="create-c++-configure"
fi
failed_tests=""
modules=$(findModules)
for module in $modules;
do
pushd $module
echo " Running tests in $module"
### Kill any rogue build processes from the last attempt
$PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
if [[ $? != 0 ]] ; then
### Find and format names of failed tests
failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/target/hadoop-common/surefire-reports/*.xml | sed -e "s|.*target/surefire-reports/TEST-| |g" | sed -e "s|\.xml||g"`
echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
if [[ $? != 0 ]] ; then
### Find and format names of failed tests
module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-| |g" | sed -e "s|\.xml||g"`
failed_tests="${failed_tests}
${module_failed_tests}"
fi
popd
done
echo $failed_tests
if [[ -n "$failed_tests" ]] ; then
JIRA_COMMENT="$JIRA_COMMENT
-1 core tests. The patch failed these core unit tests:
-1 core tests. The patch failed these unit tests:
$failed_tests"
return 1
fi
JIRA_COMMENT="$JIRA_COMMENT
+1 core tests. The patch passed core unit tests."
+1 core tests. The patch passed unit tests in $modules."
return 0
}
###############################################################################
### Find the modules changed by the patch
findModules () {
# Come up with a list of changed files into $TMP
TMP=/tmp/tmp.paths.$$
$GREP '^+++\|^---' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
# if all of the lines start with a/ or b/, then this is a git patch that
# was generated without --no-prefix
if ! $GREP -qv '^a/\|^b/' $TMP ; then
sed -i -e 's,^[ab]/,,' $TMP
fi
PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
# if all of the lines start with hadoop-common-project/, hadoop-hdfs-project/, or hadoop-mapreduce-project/, this is
# relative to the hadoop root instead of the subproject root
if [[ "$PREFIX_DIRS" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
echo $PREFIX_DIRS
return 0
elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
echo $PREFIX_DIRS
return 0
fi
# No modules found. Running from current directory.
echo .
}
###############################################################################
### Run the test-contrib target
runContribTests () {
@@ -658,6 +700,7 @@ checkInjectSystemFaults () {
#echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
#$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
echo "NOP"
return 0
if [[ $? != 0 ]] ; then
JIRA_COMMENT="$JIRA_COMMENT
@@ -779,7 +822,7 @@ checkReleaseAuditWarnings
(( RESULT = RESULT + $? ))
### Do not call these when run by a developer
if [[ $JENKINS == "true" ]] ; then
runCoreTests
runTests
(( RESULT = RESULT + $? ))
runContribTests
(( RESULT = RESULT + $? ))

View File

@@ -20,12 +20,12 @@
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project</artifactId>
<version>0.23.0-SNAPSHOT</version>
<version>0.24.0-SNAPSHOT</version>
<relativePath>../hadoop-project</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-assemblies</artifactId>
<version>0.23.0-SNAPSHOT</version>
<version>0.24.0-SNAPSHOT</version>
<name>Apache Hadoop Assemblies</name>
<description>Apache Hadoop Assemblies</description>

View File

@ -15,7 +15,7 @@
limitations under the License.
-->
<assembly>
<id>hadoop-bintar</id>
<id>hadoop-distro</id>
<formats>
<format>dir</format>
</formats>
@@ -96,6 +96,14 @@
<directory>${project.build.directory}/site/jdiff/xml</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.build.directory}/site</directory>
<outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.build.directory}/src</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/src</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
@@ -105,7 +113,6 @@
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.ant:*:jar</exclude>
<exclude>org.apache.hadoop:hadoop-*:jar</exclude>
<exclude>jdiff:jdiff:jar</exclude>
</excludes>
</dependencySet>

View File

@@ -1,95 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<assembly>
<id>hadoop-tar</id>
<formats>
<format>dir</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
<directory>${basedir}</directory>
<outputDirectory>/</outputDirectory>
<includes>
<include>*.txt</include>
</includes>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/bin</directory>
<outputDirectory>/bin</outputDirectory>
<includes>
<include>*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/bin</directory>
<outputDirectory>/libexec</outputDirectory>
<includes>
<include>*-config.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/conf</directory>
<outputDirectory>/conf</outputDirectory>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/webapps</directory>
<outputDirectory>/webapps</outputDirectory>
<excludes>
<exclude>proto-*-web.xml</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/webapps</directory>
<outputDirectory>/webapps</outputDirectory>
<excludes>
<exclude>proto-*-web.xml</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/site</directory>
<outputDirectory>/docs</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.build.directory}</directory>
<outputDirectory>/</outputDirectory>
<includes>
<include>${project.artifactId}-${project.version}.jar</include>
<include>${project.artifactId}-${project.version}-tests.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/src</directory>
<outputDirectory>/src</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<outputDirectory>/lib</outputDirectory>
<unpack>false</unpack>
<scope>runtime</scope>
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.ant:*:jar</exclude>
<exclude>org.apache.hadoop:hadoop-*:*:*:*</exclude>
<exclude>jdiff:jdiff:jar</exclude>
</excludes>
</dependencySet>
</dependencySets>
</assembly>

View File

@@ -17,12 +17,12 @@
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project</artifactId>
<version>0.23.0-SNAPSHOT</version>
<relativePath>../hadoop-project</relativePath>
<version>0.24.0-SNAPSHOT</version>
<relativePath>../../hadoop-project</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>0.23.0-SNAPSHOT</version>
<version>0.24.0-SNAPSHOT</version>
<description>Apache Hadoop Annotations</description>
<name>Apache Hadoop Annotations</name>
<packaging>jar</packaging>

View File

@@ -0,0 +1,20 @@
Build instructions for Hadoop Alfredo
Same as for Hadoop.
For more details refer to the Alfredo documentation pages.
-----------------------------------------------------------------------------
Caveats:
* Alfredo has a profile to enable Kerberos testcases (testKerberos)
To run the Kerberos testcases, a KDC, two Kerberos principals and a keytab file
are required (refer to the Alfredo documentation pages for details).
* Alfredo does not have a distribution profile (dist)
* Alfredo does not have a native code profile (native)
-----------------------------------------------------------------------------

View File

@@ -0,0 +1,15 @@
Hadoop Alfredo, Java HTTP SPNEGO
Hadoop Alfredo is a Java library consisting of client and server
components that enable Kerberos SPNEGO authentication for HTTP.
The client component is the AuthenticatedURL class.
The server component is the AuthenticationFilter servlet filter class.
Support for authentication mechanisms is pluggable in both the client and
the server components via interfaces.
In addition to Kerberos SPNEGO, Alfredo also supports Pseudo/Simple
authentication (trusting the value of the query string parameter
'user.name').
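
A condensed, hedged sketch of the client-side usage described above, based on the AuthenticatedURL API added in this commit (the class name and endpoint URL are illustrative assumptions; the bundled WhoClient example shows the same flow in full):

import org.apache.hadoop.alfredo.client.AuthenticatedURL;

import java.net.HttpURLConnection;
import java.net.URL;

public class AlfredoClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint protected by AuthenticationFilter.
    URL url = new URL("http://localhost:8080/kerberos/who");
    // The token starts unset and is populated from the signed alfredo.auth cookie.
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
    System.out.println("Status: " + conn.getResponseCode());
    // Follow-up requests can reuse the same token and skip re-authentication.
    HttpURLConnection conn2 = new AuthenticatedURL().openConnection(url, token);
    System.out.println("Status: " + conn2.getResponseCode());
  }
}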

View File

@@ -0,0 +1,210 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project</artifactId>
<version>0.24.0-SNAPSHOT</version>
<relativePath>../../hadoop-project</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-alfredo</artifactId>
<version>0.24.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>Apache Hadoop Alfredo</name>
<description>Apache Hadoop Alfredo - Java HTTP SPNEGO</description>
<url>http://hadoop.apache.org/alfredo</url>
<properties>
<maven.build.timestamp.format>yyyyMMdd</maven.build.timestamp.format>
<kerberos.realm>LOCALHOST</kerberos.realm>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<testResources>
<testResource>
<directory>${basedir}/src/test/resources</directory>
<filtering>true</filtering>
<includes>
<include>krb5.conf</include>
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkMode>always</forkMode>
<forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
<systemPropertyVariables>
<java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
<kerberos.realm>${kerberos.realm}</kerberos.realm>
</systemPropertyVariables>
<excludes>
<exclude>**/${test.exclude}.java</exclude>
<exclude>${test.exclude.pattern}</exclude>
<exclude>**/TestKerberosAuth*.java</exclude>
<exclude>**/Test*$*.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<attach>true</attach>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>testKerberos</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkMode>always</forkMode>
<forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
<systemPropertyVariables>
<java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
<kerberos.realm>${kerberos.realm}</kerberos.realm>
</systemPropertyVariables>
<excludes>
<exclude>**/${test.exclude}.java</exclude>
<exclude>${test.exclude.pattern}</exclude>
<exclude>**/Test*$*.java</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>docs</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>site</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<executions>
<execution>
<configuration>
<dependencyLocationsEnabled>false</dependencyLocationsEnabled>
</configuration>
<phase>package</phase>
<goals>
<goal>dependencies</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>javadoc</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -0,0 +1,76 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project</artifactId>
<version>0.24.0-SNAPSHOT</version>
<relativePath>../hadoop-project</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-alfredo-examples</artifactId>
<version>0.24.0-SNAPSHOT</version>
<packaging>war</packaging>
<name>Hadoop Alfredo Examples</name>
<description>Hadoop Alfredo - Java HTTP SPNEGO Examples</description>
<dependencies>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-alfredo</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>java</goal>
</goals>
</execution>
</executions>
<configuration>
<mainClass>org.apache.hadoop.alfredo.examples.WhoClient</mainClass>
<arguments>
<argument>${url}</argument>
</arguments>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,183 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.examples;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Servlet filter that logs HTTP request/response headers
*/
public class RequestLoggerFilter implements Filter {
private static Logger LOG = LoggerFactory.getLogger(RequestLoggerFilter.class);
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
if (!LOG.isDebugEnabled()) {
filterChain.doFilter(request, response);
}
else {
XHttpServletRequest xRequest = new XHttpServletRequest((HttpServletRequest) request);
XHttpServletResponse xResponse = new XHttpServletResponse((HttpServletResponse) response);
try {
LOG.debug(xRequest.getRequestInfo().toString());
filterChain.doFilter(xRequest, xResponse);
}
finally {
LOG.debug(xResponse.getResponseInfo().toString());
}
}
}
@Override
public void destroy() {
}
private static class XHttpServletRequest extends HttpServletRequestWrapper {
public XHttpServletRequest(HttpServletRequest request) {
super(request);
}
public StringBuffer getRequestInfo() {
StringBuffer sb = new StringBuffer(512);
sb.append("\n").append("> ").append(getMethod()).append(" ").append(getRequestURL());
if (getQueryString() != null) {
sb.append("?").append(getQueryString());
}
sb.append("\n");
Enumeration names = getHeaderNames();
while (names.hasMoreElements()) {
String name = (String) names.nextElement();
Enumeration values = getHeaders(name);
while (values.hasMoreElements()) {
String value = (String) values.nextElement();
sb.append("> ").append(name).append(": ").append(value).append("\n");
}
}
sb.append(">");
return sb;
}
}
private static class XHttpServletResponse extends HttpServletResponseWrapper {
private Map<String, List<String>> headers = new HashMap<String, List<String>>();
private int status;
private String message;
public XHttpServletResponse(HttpServletResponse response) {
super(response);
}
private List<String> getHeaderValues(String name, boolean reset) {
List<String> values = headers.get(name);
if (reset || values == null) {
values = new ArrayList<String>();
headers.put(name, values);
}
return values;
}
@Override
public void addCookie(Cookie cookie) {
super.addCookie(cookie);
List<String> cookies = getHeaderValues("Set-Cookie", false);
cookies.add(cookie.getName() + "=" + cookie.getValue());
}
@Override
public void sendError(int sc, String msg) throws IOException {
super.sendError(sc, msg);
status = sc;
message = msg;
}
@Override
public void sendError(int sc) throws IOException {
super.sendError(sc);
status = sc;
}
@Override
public void setStatus(int sc) {
super.setStatus(sc);
status = sc;
}
@Override
public void setStatus(int sc, String msg) {
super.setStatus(sc, msg);
status = sc;
message = msg;
}
@Override
public void setHeader(String name, String value) {
super.setHeader(name, value);
List<String> values = getHeaderValues(name, true);
values.add(value);
}
@Override
public void addHeader(String name, String value) {
super.addHeader(name, value);
List<String> values = getHeaderValues(name, false);
values.add(value);
}
public StringBuffer getResponseInfo() {
if (status == 0) {
status = 200;
message = "OK";
}
StringBuffer sb = new StringBuffer(512);
sb.append("\n").append("< ").append("status code: ").append(status);
if (message != null) {
sb.append(", message: ").append(message);
}
sb.append("\n");
for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
for (String value : entry.getValue()) {
sb.append("< ").append(entry.getKey()).append(": ").append(value).append("\n");
}
}
sb.append("<");
return sb;
}
}
}

View File

@@ -0,0 +1,57 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.examples;
import org.apache.hadoop.alfredo.client.AuthenticatedURL;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Example that uses <code>AuthenticatedURL</code>.
*/
public class WhoClient {
public static void main(String[] args) {
try {
if (args.length != 1) {
System.err.println("Usage: <URL>");
System.exit(-1);
}
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
URL url = new URL(args[0]);
HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
System.out.println();
System.out.println("Token value: " + token);
System.out.println("Status code: " + conn.getResponseCode() + " " + conn.getResponseMessage());
System.out.println();
if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
String line = reader.readLine();
while (line != null) {
System.out.println(line);
line = reader.readLine();
}
reader.close();
}
System.out.println();
}
catch (Exception ex) {
System.err.println("ERROR: " + ex.getMessage());
System.exit(-1);
}
}
}

View File

@@ -0,0 +1,43 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.examples;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.Writer;
import java.text.MessageFormat;
/**
* Example servlet that returns the user and principal of the request.
*/
public class WhoServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
resp.setContentType("text/plain");
resp.setStatus(HttpServletResponse.SC_OK);
String user = req.getRemoteUser();
String principal = (req.getUserPrincipal() != null) ? req.getUserPrincipal().getName() : null;
Writer writer = resp.getWriter();
writer.write(MessageFormat.format("You are: user[{0}] principal[{1}]\n", user, principal));
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
doGet(req, resp);
}
}

View File

@@ -0,0 +1,19 @@
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
log4j.appender.test=org.apache.log4j.ConsoleAppender
log4j.appender.test.Target=System.out
log4j.appender.test.layout=org.apache.log4j.PatternLayout
log4j.appender.test.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
log4j.logger.org.apache.hadoop.alfredo=DEBUG, test

View File

@@ -0,0 +1,117 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
<servlet>
<servlet-name>whoServlet</servlet-name>
<servlet-class>org.apache.hadoop.alfredo.examples.WhoServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>whoServlet</servlet-name>
<url-pattern>/anonymous/who</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>whoServlet</servlet-name>
<url-pattern>/simple/who</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>whoServlet</servlet-name>
<url-pattern>/kerberos/who</url-pattern>
</servlet-mapping>
<filter>
<filter-name>requestLoggerFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.examples.RequestLoggerFilter</filter-class>
</filter>
<filter>
<filter-name>anonymousFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
<init-param>
<param-name>type</param-name>
<param-value>simple</param-value>
</init-param>
<init-param>
<param-name>simple.anonymous.allowed</param-name>
<param-value>true</param-value>
</init-param>
<init-param>
<param-name>token.validity</param-name>
<param-value>30</param-value>
</init-param>
</filter>
<filter>
<filter-name>simpleFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
<init-param>
<param-name>type</param-name>
<param-value>simple</param-value>
</init-param>
<init-param>
<param-name>simple.anonymous.allowed</param-name>
<param-value>false</param-value>
</init-param>
<init-param>
<param-name>token.validity</param-name>
<param-value>30</param-value>
</init-param>
</filter>
<filter>
<filter-name>kerberosFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
<init-param>
<param-name>type</param-name>
<param-value>kerberos</param-value>
</init-param>
<init-param>
<param-name>kerberos.principal</param-name>
<param-value>HTTP/localhost@LOCALHOST</param-value>
</init-param>
<init-param>
<param-name>kerberos.keytab</param-name>
<param-value>/tmp/alfredo.keytab</param-value>
</init-param>
<init-param>
<param-name>token.validity</param-name>
<param-value>30</param-value>
</init-param>
</filter>
<filter-mapping>
<filter-name>requestLoggerFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<filter-mapping>
<filter-name>anonymousFilter</filter-name>
<url-pattern>/anonymous/*</url-pattern>
</filter-mapping>
<filter-mapping>
<filter-name>simpleFilter</filter-name>
<url-pattern>/simple/*</url-pattern>
</filter-mapping>
<filter-mapping>
<filter-name>kerberosFilter</filter-name>
<url-pattern>/kerberos/*</url-pattern>
</filter-mapping>
</web-app>

View File

@@ -0,0 +1,18 @@
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<html>
<body>
<h1>Hello Hadoop Alfredo Pseudo/Simple Authentication with anonymous users!</h1>
</body>
</html>

View File

@@ -0,0 +1,18 @@
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<html>
<body>
<h1>Hello Hadoop Alfredo Examples</h1>
</body>
</html>

View File

@@ -0,0 +1,18 @@
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<html>
<body>
<h1>Hello Hadoop Alfredo Kerberos SPNEGO Authentication!</h1>
</body>
</html>

View File

@@ -0,0 +1,18 @@
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<html>
<body>
<h1>Hello Hadoop Alfredo Pseudo/Simple Authentication!</h1>
</body>
</html>

View File

@@ -0,0 +1,274 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import org.apache.hadoop.alfredo.server.AuthenticationFilter;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import java.util.Map;
/**
* The {@link AuthenticatedURL} class enables the use of the JDK {@link URL} class
* against HTTP endpoints protected with the {@link AuthenticationFilter}.
* <p/>
* The authentication mechanisms supported by default are Hadoop Simple authentication
* (also known as pseudo authentication) and Kerberos SPNEGO authentication.
* <p/>
* Additional authentication mechanisms can be supported via {@link Authenticator} implementations.
* <p/>
* The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports
* automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication.
* <p/>
* <code>AuthenticatedURL</code> instances are not thread-safe.
* <p/>
* The usage pattern of the {@link AuthenticatedURL} is:
* <p/>
* <pre>
*
* // establishing an initial connection
*
* URL url = new URL("http://foo:8080/bar");
* AuthenticatedURL.Token token = new AuthenticatedURL.Token();
* AuthenticatedURL aUrl = new AuthenticatedURL();
* HttpURLConnection conn = aUrl.openConnection(url, token);
* ....
* // use the 'conn' instance
* ....
*
* // establishing a follow up connection using a token from the previous connection
*
* conn = aUrl.openConnection(url, token);
* ....
* // use the 'conn' instance
* ....
*
* </pre>
*/
public class AuthenticatedURL {
/**
* Name of the HTTP cookie used for the authentication token between the client and the server.
*/
public static final String AUTH_COOKIE = "alfredo.auth";
private static final String AUTH_COOKIE_EQ = AUTH_COOKIE + "=";
/**
* Client side authentication token.
*/
public static class Token {
private String token;
/**
* Creates a token.
*/
public Token() {
}
/**
* Creates a token using an existing string representation of the token.
*
* @param tokenStr string representation of the token.
*/
public Token(String tokenStr) {
if (tokenStr == null) {
throw new IllegalArgumentException("tokenStr cannot be null");
}
set(tokenStr);
}
/**
* Returns if a token from the server has been set.
*
* @return if a token from the server has been set.
*/
public boolean isSet() {
return token != null;
}
/**
* Sets a token.
*
* @param tokenStr string representation of the token.
*/
void set(String tokenStr) {
token = tokenStr;
}
/**
* Returns the string representation of the token.
*
* @return the string representation of the token.
*/
@Override
public String toString() {
return token;
}
/**
* Return the hashcode for the token.
*
* @return the hashcode for the token.
*/
@Override
public int hashCode() {
return (token != null) ? token.hashCode() : 0;
}
/**
* Return if two token instances are equal.
*
* @param o the other token instance.
*
* @return if this instance and the other instance are equal.
*/
@Override
public boolean equals(Object o) {
boolean eq = false;
if (o instanceof Token) {
Token other = (Token) o;
eq = (token == null && other.token == null) || (token != null && this.token.equals(other.token));
}
return eq;
}
}
private static Class<? extends Authenticator> DEFAULT_AUTHENTICATOR = KerberosAuthenticator.class;
/**
* Sets the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
* is created without specifying an authenticator.
*
* @param authenticator the authenticator class to use as default.
*/
public static void setDefaultAuthenticator(Class<? extends Authenticator> authenticator) {
DEFAULT_AUTHENTICATOR = authenticator;
}
/**
* Returns the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
* is created without specifying an authenticator.
*
* @return the authenticator class to use as default.
*/
public static Class<? extends Authenticator> getDefaultAuthenticator() {
return DEFAULT_AUTHENTICATOR;
}
private Authenticator authenticator;
/**
* Creates an {@link AuthenticatedURL}.
*/
public AuthenticatedURL() {
this(null);
}
/**
* Creates an <code>AuthenticatedURL</code>.
*
* @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
* KerberosAuthenticator} is used.
*/
public AuthenticatedURL(Authenticator authenticator) {
try {
this.authenticator = (authenticator != null) ? authenticator : DEFAULT_AUTHENTICATOR.newInstance();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Returns an authenticated {@link HttpURLConnection}.
*
* @param url the URL to connect to. Only HTTP/S URLs are supported.
* @param token the authentication token being used for the user.
*
* @return an authenticated {@link HttpURLConnection}.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
public HttpURLConnection openConnection(URL url, Token token) throws IOException, AuthenticationException {
if (url == null) {
throw new IllegalArgumentException("url cannot be NULL");
}
if (!url.getProtocol().equalsIgnoreCase("http") && !url.getProtocol().equalsIgnoreCase("https")) {
throw new IllegalArgumentException("url must be for a HTTP or HTTPS resource");
}
if (token == null) {
throw new IllegalArgumentException("token cannot be NULL");
}
authenticator.authenticate(url, token);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
injectToken(conn, token);
return conn;
}
/**
* Helper method that injects an authentication token to send with a connection.
*
* @param conn connection to inject the authentication token into.
* @param token authentication token to inject.
*/
public static void injectToken(HttpURLConnection conn, Token token) {
String t = token.token;
if (t != null) {
if (!t.startsWith("\"")) {
t = "\"" + t + "\"";
}
conn.addRequestProperty("Cookie", AUTH_COOKIE_EQ + t);
}
}
/**
* Helper method that extracts an authentication token received from a connection.
* <p/>
* This method is used by {@link Authenticator} implementations.
*
* @param conn connection to extract the authentication token from.
* @param token the authentication token.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
*/
public static void extractToken(HttpURLConnection conn, Token token) throws IOException, AuthenticationException {
if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
Map<String, List<String>> headers = conn.getHeaderFields();
List<String> cookies = headers.get("Set-Cookie");
if (cookies != null) {
for (String cookie : cookies) {
if (cookie.startsWith(AUTH_COOKIE_EQ)) {
String value = cookie.substring(AUTH_COOKIE_EQ.length());
int separator = value.indexOf(";");
if (separator > -1) {
value = value.substring(0, separator);
}
if (value.length() > 0) {
token.set(value);
}
}
}
}
} else {
throw new AuthenticationException("Authentication failed, status: " + conn.getResponseCode() +
", message: " + conn.getResponseMessage());
}
}
}

View File

@@ -0,0 +1,50 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
/**
* Exception thrown when an authentication error occurs.
*/
public class AuthenticationException extends Exception {
static final long serialVersionUID = 0;
/**
* Creates an {@link AuthenticationException}.
*
* @param cause original exception.
*/
public AuthenticationException(Throwable cause) {
super(cause);
}
/**
* Creates an {@link AuthenticationException}.
*
* @param msg exception message.
*/
public AuthenticationException(String msg) {
super(msg);
}
/**
* Creates an {@link AuthenticationException}.
*
* @param msg exception message.
* @param cause original exception.
*/
public AuthenticationException(String msg, Throwable cause) {
super(msg, cause);
}
}

View File

@@ -0,0 +1,39 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import java.io.IOException;
import java.net.URL;
/**
* Interface for client authentication mechanisms.
* <p/>
* Implementations are use-once instances; they don't need to be thread-safe.
*/
public interface Authenticator {
/**
* Authenticates against a URL and returns a {@link AuthenticatedURL.Token} to be
* used by subsequent requests.
*
* @param url the URL to authenticate against.
* @param token the authentication token being used for the user.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication error occurred.
*/
public void authenticate(URL url, AuthenticatedURL.Token token) throws IOException, AuthenticationException;
}
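
The interface above is the pluggability point mentioned in the README. A hedged sketch of a custom client-side implementation follows; the API-key scheme and header name are assumptions rather than part of Alfredo, and the flow simply mirrors PseudoAuthenticator: issue an OPTIONS request, then capture the signed cookie with AuthenticatedURL.extractToken.

package org.apache.hadoop.alfredo.client;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

/**
 * Hypothetical Authenticator that presents a static API key header.
 */
public class ApiKeyAuthenticator implements Authenticator {

  private final String apiKey;

  public ApiKeyAuthenticator(String apiKey) {
    this.apiKey = apiKey;
  }

  @Override
  public void authenticate(URL url, AuthenticatedURL.Token token)
    throws IOException, AuthenticationException {
    if (!token.isSet()) {
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("OPTIONS");
      conn.setRequestProperty("X-Api-Key", apiKey); // assumed header name
      conn.connect();
      // Reuse Alfredo's helper to store the authentication cookie returned by the server.
      AuthenticatedURL.extractToken(conn, token);
    }
  }
}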

View File

@@ -0,0 +1,270 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import com.sun.security.auth.module.Krb5LoginModule;
import org.apache.commons.codec.binary.Base64;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import sun.security.jgss.GSSUtil;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
/**
* The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
* <p/>
* It uses the default principal for the Kerberos cache (normally set via kinit).
* <p/>
* It falls back to the {@link PseudoAuthenticator} if the HTTP endpoint does not trigger an SPNEGO authentication
* sequence.
*/
public class KerberosAuthenticator implements Authenticator {
/**
* HTTP header used by the SPNEGO server endpoint during an authentication sequence.
*/
public static String WWW_AUTHENTICATE = "WWW-Authenticate";
/**
* HTTP header used by the SPNEGO client endpoint during an authentication sequence.
*/
public static String AUTHORIZATION = "Authorization";
/**
* HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
*/
public static String NEGOTIATE = "Negotiate";
private static final String AUTH_HTTP_METHOD = "OPTIONS";
/*
* Defines the Kerberos configuration that will be used to obtain the Kerberos principal from the
* Kerberos cache.
*/
private static class KerberosConfiguration extends Configuration {
private static final String OS_LOGIN_MODULE_NAME;
private static final boolean windows = System.getProperty("os.name").startsWith("Windows");
static {
if (windows) {
OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.NTLoginModule";
} else {
OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.UnixLoginModule";
}
}
private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
new HashMap<String, String>());
private static final Map<String, String> USER_KERBEROS_OPTIONS = new HashMap<String, String>();
static {
USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
USER_KERBEROS_OPTIONS.put("renewTGT", "true");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
}
}
private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
new AppConfigurationEntry(Krb5LoginModule.class.getName(),
AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
USER_KERBEROS_OPTIONS);
private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN};
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
return USER_KERBEROS_CONF;
}
}
static {
javax.security.auth.login.Configuration.setConfiguration(new KerberosConfiguration());
}
private URL url;
private HttpURLConnection conn;
private Base64 base64;
/**
* Performs SPNEGO authentication against the specified URL.
* <p/>
* If a token is given it does a NOP and returns the given token.
* <p/>
* If no token is given, it will perform the SPNEGO authentication sequence using an
* HTTP <code>OPTIONS</code> request.
*
* @param url the URL to authenticate against.
* @param token the authentication token being used for the user.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication error occurred.
*/
@Override
public void authenticate(URL url, AuthenticatedURL.Token token)
throws IOException, AuthenticationException {
if (!token.isSet()) {
this.url = url;
base64 = new Base64(0);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod(AUTH_HTTP_METHOD);
conn.connect();
if (isNegotiate()) {
doSpnegoSequence(token);
} else {
getFallBackAuthenticator().authenticate(url, token);
}
}
}
/**
* If the specified URL does not support SPNEGO authentication, a fallback {@link Authenticator} will be used.
* <p/>
* This implementation returns a {@link PseudoAuthenticator}.
*
* @return the fallback {@link Authenticator}.
*/
protected Authenticator getFallBackAuthenticator() {
return new PseudoAuthenticator();
}
/*
* Indicates if the response is starting a SPNEGO negotiation.
*/
private boolean isNegotiate() throws IOException {
boolean negotiate = false;
if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
}
return negotiate;
}
/**
* Implements the SPNEGO authentication sequence interaction using the current default principal
* in the Kerberos cache (normally set via kinit).
*
* @param token the authentication token being used for the user.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication error occurred.
*/
private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
try {
AccessControlContext context = AccessController.getContext();
Subject subject = Subject.getSubject(context);
if (subject == null) {
subject = new Subject();
LoginContext login = new LoginContext("", subject);
login.login();
}
Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
GSSContext gssContext = null;
try {
GSSManager gssManager = GSSManager.getInstance();
String servicePrincipal = "HTTP/" + KerberosAuthenticator.this.url.getHost();
GSSName serviceName = gssManager.createName(servicePrincipal,
GSSUtil.NT_GSS_KRB5_PRINCIPAL);
gssContext = gssManager.createContext(serviceName, GSSUtil.GSS_KRB5_MECH_OID, null,
GSSContext.DEFAULT_LIFETIME);
gssContext.requestCredDeleg(true);
gssContext.requestMutualAuth(true);
byte[] inToken = new byte[0];
byte[] outToken;
boolean established = false;
// Loop while the context is still not established
while (!established) {
outToken = gssContext.initSecContext(inToken, 0, inToken.length);
if (outToken != null) {
sendToken(outToken);
}
if (!gssContext.isEstablished()) {
inToken = readToken();
} else {
established = true;
}
}
} finally {
if (gssContext != null) {
gssContext.dispose();
gssContext = null;
}
}
return null;
}
});
} catch (PrivilegedActionException ex) {
throw new AuthenticationException(ex.getException());
} catch (LoginException ex) {
throw new AuthenticationException(ex);
}
AuthenticatedURL.extractToken(conn, token);
}
/*
* Sends the Kerberos token to the server.
*/
private void sendToken(byte[] outToken) throws IOException, AuthenticationException {
String token = base64.encodeToString(outToken);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod(AUTH_HTTP_METHOD);
conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
conn.connect();
}
/*
* Retrieves the Kerberos token returned by the server.
*/
private byte[] readToken() throws IOException, AuthenticationException {
int status = conn.getResponseCode();
if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE +
"' header incorrect: " + authHeader);
}
String negotiation = authHeader.trim().substring((NEGOTIATE + " ").length()).trim();
return base64.decode(negotiation);
}
throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
}
}

View File

@ -0,0 +1,74 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * The {@link PseudoAuthenticator} implementation provides an authentication equivalent to Hadoop's
 * Simple authentication; it trusts the value of the 'user.name' Java System property.
* <p/>
* The 'user.name' value is propagated using an additional query string parameter {@link #USER_NAME} ('user.name').
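 * <p/>
 * A minimal client-side usage sketch (the URL below is hypothetical):
 * <pre>
 *   URL url = new URL("http://localhost:8080/webapp/resource");
 *   AuthenticatedURL.Token token = new AuthenticatedURL.Token();
 *   new PseudoAuthenticator().authenticate(url, token);
 * </pre>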
*/
public class PseudoAuthenticator implements Authenticator {
/**
* Name of the additional parameter that carries the 'user.name' value.
*/
public static final String USER_NAME = "user.name";
private static final String USER_NAME_EQ = USER_NAME + "=";
/**
* Performs simple authentication against the specified URL.
* <p/>
* If a token is given it does a NOP and returns the given token.
* <p/>
* If no token is given, it will perform an HTTP <code>OPTIONS</code> request injecting an additional
* parameter {@link #USER_NAME} in the query string with the value returned by the {@link #getUserName()}
* method.
* <p/>
* If the response is successful it will update the authentication token.
*
   * @param url the URL to authenticate against.
   * @param token the authentication token being used for the user.
*
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication error occurred.
*/
@Override
public void authenticate(URL url, AuthenticatedURL.Token token) throws IOException, AuthenticationException {
String strUrl = url.toString();
String paramSeparator = (strUrl.contains("?")) ? "&" : "?";
strUrl += paramSeparator + USER_NAME_EQ + getUserName();
url = new URL(strUrl);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("OPTIONS");
conn.connect();
AuthenticatedURL.extractToken(conn, token);
}
/**
* Returns the current user name.
* <p/>
* This implementation returns the value of the Java system property 'user.name'
*
* @return the current user name.
*/
protected String getUserName() {
return System.getProperty("user.name");
}
}

View File

@ -0,0 +1,402 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticatedURL;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.util.Signer;
import org.apache.hadoop.alfredo.util.SignerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.security.Principal;
import java.util.Enumeration;
import java.util.Properties;
import java.util.Random;
/**
* The {@link AuthenticationFilter} enables protecting web application resources with different (pluggable)
* authentication mechanisms.
* <p/>
* Out of the box it provides 2 authentication mechanisms: Pseudo and Kerberos SPNEGO.
* <p/>
* Additional authentication mechanisms are supported via the {@link AuthenticationHandler} interface.
* <p/>
* This filter delegates to the configured authentication handler for authentication and once it obtains an
* {@link AuthenticationToken} from it, sets a signed HTTP cookie with the token. For client requests
* that provide the signed HTTP cookie, it verifies the validity of the cookie, extracts the user information
* and lets the request proceed to the target resource.
* <p/>
* The supported configuration properties are:
* <ul>
 * <li>config.prefix: indicates the prefix to be used by all other configuration properties; the default value
 * is no prefix. See below for details on how/why this prefix is used.</li>
* <li>[#PREFIX#.]type: simple|kerberos|#CLASS#, 'simple' is short for the
* {@link PseudoAuthenticationHandler}, 'kerberos' is short for {@link KerberosAuthenticationHandler}, otherwise
* the full class name of the {@link AuthenticationHandler} must be specified.</li>
* <li>[#PREFIX#.]signature.secret: the secret used to sign the HTTP cookie value. The default value is a random
* value. Unless multiple webapp instances need to share the secret the random value is adequate.</li>
 * <li>[#PREFIX#.]token.validity: time -in seconds- that the generated token is valid before a
 * new authentication is triggered; the default value is <code>3600</code> seconds.</li>
* <li>[#PREFIX#.]cookie.domain: domain to use for the HTTP cookie that stores the authentication token.</li>
* <li>[#PREFIX#.]cookie.path: path to use for the HTTP cookie that stores the authentication token.</li>
* </ul>
* <p/>
 * The rest of the configuration properties are specific to the {@link AuthenticationHandler} implementation. The
 * {@link AuthenticationFilter} will take all the properties that start with the prefix #PREFIX#, remove the
 * prefix from them and pass them to the authentication handler for initialization. Properties that do
 * not start with the prefix will not be passed to the authentication handler initialization.
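 * <p/>
 * For example, a Kerberos setup could use filter init parameters like the following
 * (the values are illustrative only):
 * <pre>
 *   type               = kerberos
 *   token.validity     = 30
 *   cookie.domain      = .foo.com
 *   cookie.path        = /
 *   kerberos.principal = HTTP/localhost@LOCALHOST
 *   kerberos.keytab    = /tmp/alfredo.keytab
 * </pre>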
*/
public class AuthenticationFilter implements Filter {
private static Logger LOG = LoggerFactory.getLogger(AuthenticationFilter.class);
/**
* Constant for the property that specifies the configuration prefix.
*/
public static final String CONFIG_PREFIX = "config.prefix";
/**
* Constant for the property that specifies the authentication handler to use.
*/
public static final String AUTH_TYPE = "type";
/**
* Constant for the property that specifies the secret to use for signing the HTTP Cookies.
*/
public static final String SIGNATURE_SECRET = "signature.secret";
/**
* Constant for the configuration property that indicates the validity of the generated token.
*/
public static final String AUTH_TOKEN_VALIDITY = "token.validity";
/**
* Constant for the configuration property that indicates the domain to use in the HTTP cookie.
*/
public static final String COOKIE_DOMAIN = "cookie.domain";
/**
* Constant for the configuration property that indicates the path to use in the HTTP cookie.
*/
public static final String COOKIE_PATH = "cookie.path";
private Signer signer;
private AuthenticationHandler authHandler;
private boolean randomSecret;
private long validity;
private String cookieDomain;
private String cookiePath;
/**
* Initializes the authentication filter.
* <p/>
* It instantiates and initializes the specified {@link AuthenticationHandler}.
* <p/>
*
* @param filterConfig filter configuration.
*
* @throws ServletException thrown if the filter or the authentication handler could not be initialized properly.
*/
@Override
public void init(FilterConfig filterConfig) throws ServletException {
String configPrefix = filterConfig.getInitParameter(CONFIG_PREFIX);
configPrefix = (configPrefix != null) ? configPrefix + "." : "";
Properties config = getConfiguration(configPrefix, filterConfig);
String authHandlerName = config.getProperty(AUTH_TYPE, null);
String authHandlerClassName;
if (authHandlerName == null) {
throw new ServletException("Authentication type must be specified: simple|kerberos|<class>");
}
if (authHandlerName.equals("simple")) {
authHandlerClassName = PseudoAuthenticationHandler.class.getName();
} else if (authHandlerName.equals("kerberos")) {
authHandlerClassName = KerberosAuthenticationHandler.class.getName();
} else {
authHandlerClassName = authHandlerName;
}
try {
Class klass = Thread.currentThread().getContextClassLoader().loadClass(authHandlerClassName);
authHandler = (AuthenticationHandler) klass.newInstance();
authHandler.init(config);
} catch (ClassNotFoundException ex) {
throw new ServletException(ex);
} catch (InstantiationException ex) {
throw new ServletException(ex);
} catch (IllegalAccessException ex) {
throw new ServletException(ex);
}
String signatureSecret = config.getProperty(configPrefix + SIGNATURE_SECRET);
if (signatureSecret == null) {
signatureSecret = Long.toString(new Random(System.currentTimeMillis()).nextLong());
randomSecret = true;
LOG.warn("'signature.secret' configuration not set, using a random value as secret");
}
signer = new Signer(signatureSecret.getBytes());
validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000")) * 1000; //10 hours
cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
cookiePath = config.getProperty(COOKIE_PATH, null);
}
/**
* Returns the authentication handler being used.
*
* @return the authentication handler being used.
*/
protected AuthenticationHandler getAuthenticationHandler() {
return authHandler;
}
/**
* Returns if a random secret is being used.
*
* @return if a random secret is being used.
*/
protected boolean isRandomSecret() {
return randomSecret;
}
/**
* Returns the validity time of the generated tokens.
*
* @return the validity time of the generated tokens, in seconds.
*/
protected long getValidity() {
return validity / 1000;
}
/**
* Returns the cookie domain to use for the HTTP cookie.
*
* @return the cookie domain to use for the HTTP cookie.
*/
protected String getCookieDomain() {
return cookieDomain;
}
/**
* Returns the cookie path to use for the HTTP cookie.
*
* @return the cookie path to use for the HTTP cookie.
*/
protected String getCookiePath() {
return cookiePath;
}
/**
* Destroys the filter.
* <p/>
* It invokes the {@link AuthenticationHandler#destroy()} method to release any resources it may hold.
*/
@Override
public void destroy() {
if (authHandler != null) {
authHandler.destroy();
authHandler = null;
}
}
/**
   * Returns the filtered configuration (only properties starting with the specified prefix). The prefix is
   * also trimmed from the property keys. The returned {@link Properties} object is used to initialize the
   * {@link AuthenticationHandler}.
   * <p/>
   * This method can be overridden by subclasses to obtain the configuration from a configuration source other
   * than the web.xml file.
*
* @param configPrefix configuration prefix to use for extracting configuration properties.
* @param filterConfig filter configuration object
*
* @return the configuration to be used with the {@link AuthenticationHandler} instance.
*
* @throws ServletException thrown if the configuration could not be created.
*/
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
Properties props = new Properties();
Enumeration names = filterConfig.getInitParameterNames();
while (names.hasMoreElements()) {
String name = (String) names.nextElement();
if (name.startsWith(configPrefix)) {
String value = filterConfig.getInitParameter(name);
props.put(name.substring(configPrefix.length()), value);
}
}
return props;
}
/**
* Returns the full URL of the request including the query string.
* <p/>
* Used as a convenience method for logging purposes.
*
* @param request the request object.
*
* @return the full URL of the request including the query string.
*/
protected String getRequestURL(HttpServletRequest request) {
StringBuffer sb = request.getRequestURL();
if (request.getQueryString() != null) {
sb.append("?").append(request.getQueryString());
}
return sb.toString();
}
/**
* Returns the {@link AuthenticationToken} for the request.
* <p/>
* It looks at the received HTTP cookies and extracts the value of the {@link AuthenticatedURL#AUTH_COOKIE}
* if present. It verifies the signature and if correct it creates the {@link AuthenticationToken} and returns
* it.
* <p/>
* If this method returns <code>null</code> the filter will invoke the configured {@link AuthenticationHandler}
* to perform user authentication.
*
* @param request request object.
*
* @return the Authentication token if the request is authenticated, <code>null</code> otherwise.
*
* @throws IOException thrown if an IO error occurred.
* @throws AuthenticationException thrown if the token is invalid or if it has expired.
*/
protected AuthenticationToken getToken(HttpServletRequest request) throws IOException, AuthenticationException {
AuthenticationToken token = null;
String tokenStr = null;
Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (Cookie cookie : cookies) {
if (cookie.getName().equals(AuthenticatedURL.AUTH_COOKIE)) {
tokenStr = cookie.getValue();
try {
tokenStr = signer.verifyAndExtract(tokenStr);
} catch (SignerException ex) {
throw new AuthenticationException(ex);
}
break;
}
}
}
if (tokenStr != null) {
token = AuthenticationToken.parse(tokenStr);
if (!token.getType().equals(authHandler.getType())) {
throw new AuthenticationException("Invalid AuthenticationToken type");
}
if (token.isExpired()) {
throw new AuthenticationException("AuthenticationToken expired");
}
}
return token;
}
/**
* If the request has a valid authentication token it allows the request to continue to the target resource,
* otherwise it triggers an authentication sequence using the configured {@link AuthenticationHandler}.
*
* @param request the request object.
* @param response the response object.
* @param filterChain the filter chain object.
*
* @throws IOException thrown if an IO error occurred.
* @throws ServletException thrown if a processing error occurred.
*/
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
HttpServletRequest httpRequest = (HttpServletRequest) request;
HttpServletResponse httpResponse = (HttpServletResponse) response;
try {
boolean newToken = false;
AuthenticationToken token = getToken(httpRequest);
if (token == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Request [{}] triggering authentication", getRequestURL(httpRequest));
}
token = authHandler.authenticate(httpRequest, httpResponse);
if (token != null && token != AuthenticationToken.ANONYMOUS) {
token.setExpires(System.currentTimeMillis() + getValidity() * 1000);
}
newToken = true;
}
if (token != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName());
}
final AuthenticationToken authToken = token;
httpRequest = new HttpServletRequestWrapper(httpRequest) {
@Override
public String getAuthType() {
return authToken.getType();
}
@Override
public String getRemoteUser() {
return authToken.getUserName();
}
@Override
public Principal getUserPrincipal() {
return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null;
}
};
if (newToken && token != AuthenticationToken.ANONYMOUS) {
String signedToken = signer.sign(token.toString());
Cookie cookie = createCookie(signedToken);
httpResponse.addCookie(cookie);
}
filterChain.doFilter(httpRequest, httpResponse);
}
} catch (AuthenticationException ex) {
if (!httpResponse.isCommitted()) {
Cookie cookie = createCookie("");
cookie.setMaxAge(0);
httpResponse.addCookie(cookie);
httpResponse.sendError(HttpServletResponse.SC_UNAUTHORIZED, ex.getMessage());
}
LOG.warn("Authentication exception: " + ex.getMessage(), ex);
}
}
/**
   * Creates the Alfredo authentication HTTP cookie.
* <p/>
* It sets the domain and path specified in the configuration.
*
* @param token authentication token for the cookie.
*
* @return the HTTP cookie.
*/
protected Cookie createCookie(String token) {
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, token);
if (getCookieDomain() != null) {
cookie.setDomain(getCookieDomain());
}
if (getCookiePath() != null) {
cookie.setPath(getCookiePath());
}
return cookie;
}
}

View File

@ -0,0 +1,89 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Properties;
/**
* Interface for server authentication mechanisms.
* <p/>
* The {@link AuthenticationFilter} manages the lifecycle of the authentication handler.
* <p/>
* Implementations must be thread-safe as one instance is initialized and used for all requests.
*/
public interface AuthenticationHandler {
/**
* Returns the authentication type of the authentication handler.
* <p/>
* This should be a name that uniquely identifies the authentication type.
* For example 'simple' or 'kerberos'.
*
* @return the authentication type of the authentication handler.
*/
public String getType();
/**
* Initializes the authentication handler instance.
* <p/>
* This method is invoked by the {@link AuthenticationFilter#init} method.
*
* @param config configuration properties to initialize the handler.
*
* @throws ServletException thrown if the handler could not be initialized.
*/
public void init(Properties config) throws ServletException;
/**
* Destroys the authentication handler instance.
* <p/>
* This method is invoked by the {@link AuthenticationFilter#destroy} method.
*/
public void destroy();
/**
* Performs an authentication step for the given HTTP client request.
* <p/>
* This method is invoked by the {@link AuthenticationFilter} only if the HTTP client request is
* not yet authenticated.
* <p/>
* Depending upon the authentication mechanism being implemented, a particular HTTP client may
* end up making a sequence of invocations before authentication is successfully established (this is
* the case of Kerberos SPNEGO).
* <p/>
   * This method must return an {@link AuthenticationToken} only if the HTTP client request has
* been successfully and fully authenticated.
* <p/>
* If the HTTP client request has not been completely authenticated, this method must take over
* the corresponding HTTP response and it must return <code>null</code>.
*
* @param request the HTTP client request.
* @param response the HTTP client response.
*
* @return an {@link AuthenticationToken} if the HTTP client request has been authenticated,
* <code>null</code> otherwise (in this case it must take care of the response).
*
* @throws IOException thrown if an IO error occurred.
* @throws AuthenticationException thrown if an Authentication error occurred.
*/
public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse response)
throws IOException, AuthenticationException;
}

View File

@ -0,0 +1,226 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import java.security.Principal;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
/**
 * The {@link AuthenticationToken} contains information about an authenticated HTTP client and doubles
 * as the {@link Principal} to be returned by authenticated {@link HttpServletRequest}s.
 * <p/>
 * The token can be serialized/deserialized to and from a string as it is sent and received in HTTP client
 * responses and requests as an HTTP cookie (this is done by the {@link AuthenticationFilter}).
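 * <p/>
 * As a sketch, the string form produced by {@link #toString()} looks like
 * <code>u=foo&amp;p=foo@LOCALHOST&amp;t=kerberos&amp;e=1234567890</code> (the attribute values are
 * illustrative); the {@link AuthenticationFilter} signs this string before setting it as the cookie value.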
*/
public class AuthenticationToken implements Principal {
/**
* Constant that identifies an anonymous request.
*/
public static final AuthenticationToken ANONYMOUS = new AuthenticationToken();
private static final String ATTR_SEPARATOR = "&";
private static final String USER_NAME = "u";
private static final String PRINCIPAL = "p";
private static final String EXPIRES = "e";
private static final String TYPE = "t";
private final static Set<String> ATTRIBUTES =
new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL, EXPIRES, TYPE));
private String userName;
private String principal;
private String type;
private long expires;
private String token;
private AuthenticationToken() {
userName = null;
principal = null;
type = null;
expires = -1;
token = "ANONYMOUS";
generateToken();
}
private static final String ILLEGAL_ARG_MSG = " is NULL, empty or contains a '" + ATTR_SEPARATOR + "'";
/**
* Creates an authentication token.
*
* @param userName user name.
   * @param principal principal (commonly matches the user name; with Kerberos it is the full/long principal
   * name while the userName is the short name).
   * @param type the authentication mechanism name.
*/
public AuthenticationToken(String userName, String principal, String type) {
checkForIllegalArgument(userName, "userName");
checkForIllegalArgument(principal, "principal");
checkForIllegalArgument(type, "type");
this.userName = userName;
this.principal = principal;
this.type = type;
this.expires = -1;
}
/**
* Check if the provided value is invalid. Throw an error if it is invalid, NOP otherwise.
*
* @param value the value to check.
* @param name the parameter name to use in an error message if the value is invalid.
*/
private static void checkForIllegalArgument(String value, String name) {
if (value == null || value.length() == 0 || value.contains(ATTR_SEPARATOR)) {
throw new IllegalArgumentException(name + ILLEGAL_ARG_MSG);
}
}
/**
* Sets the expiration of the token.
*
* @param expires expiration time of the token in milliseconds since the epoch.
*/
public void setExpires(long expires) {
if (this != AuthenticationToken.ANONYMOUS) {
this.expires = expires;
generateToken();
}
}
/**
* Generates the token.
*/
private void generateToken() {
StringBuffer sb = new StringBuffer();
sb.append(USER_NAME).append("=").append(userName).append(ATTR_SEPARATOR);
sb.append(PRINCIPAL).append("=").append(principal).append(ATTR_SEPARATOR);
sb.append(TYPE).append("=").append(type).append(ATTR_SEPARATOR);
sb.append(EXPIRES).append("=").append(expires);
token = sb.toString();
}
/**
* Returns the user name.
*
* @return the user name.
*/
public String getUserName() {
return userName;
}
/**
* Returns the principal name (this method name comes from the JDK {@link Principal} interface).
*
* @return the principal name.
*/
@Override
public String getName() {
return principal;
}
/**
* Returns the authentication mechanism of the token.
*
* @return the authentication mechanism of the token.
*/
public String getType() {
return type;
}
/**
* Returns the expiration time of the token.
*
   * @return the expiration time of the token, in milliseconds since the epoch.
*/
public long getExpires() {
return expires;
}
/**
* Returns if the token has expired.
*
* @return if the token has expired.
*/
public boolean isExpired() {
return expires != -1 && System.currentTimeMillis() > expires;
}
/**
* Returns the string representation of the token.
* <p/>
* This string representation is parseable by the {@link #parse} method.
*
* @return the string representation of the token.
*/
@Override
public String toString() {
return token;
}
/**
* Parses a string into an authentication token.
*
* @param tokenStr string representation of a token.
*
* @return the parsed authentication token.
*
* @throws AuthenticationException thrown if the string representation could not be parsed into
* an authentication token.
*/
public static AuthenticationToken parse(String tokenStr) throws AuthenticationException {
Map<String, String> map = split(tokenStr);
if (!map.keySet().equals(ATTRIBUTES)) {
throw new AuthenticationException("Invalid token string, missing attributes");
}
long expires = Long.parseLong(map.get(EXPIRES));
AuthenticationToken token = new AuthenticationToken(map.get(USER_NAME), map.get(PRINCIPAL), map.get(TYPE));
token.setExpires(expires);
return token;
}
/**
* Splits the string representation of a token into attributes pairs.
*
* @param tokenStr string representation of a token.
*
* @return a map with the attribute pairs of the token.
*
* @throws AuthenticationException thrown if the string representation of the token could not be broken into
* attribute pairs.
*/
private static Map<String, String> split(String tokenStr) throws AuthenticationException {
Map<String, String> map = new HashMap<String, String>();
StringTokenizer st = new StringTokenizer(tokenStr, ATTR_SEPARATOR);
while (st.hasMoreTokens()) {
String part = st.nextToken();
int separator = part.indexOf('=');
if (separator == -1) {
throw new AuthenticationException("Invalid authentication token");
}
String key = part.substring(0, separator);
String value = part.substring(separator + 1);
map.put(key, value);
}
return map;
}
}

View File

@ -0,0 +1,310 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.client.KerberosAuthenticator;
import com.sun.security.auth.module.Krb5LoginModule;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.alfredo.util.KerberosName;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO authentication mechanism for HTTP.
* <p/>
* The supported configuration properties are:
* <ul>
 * <li>kerberos.principal: the Kerberos principal to be used by the server. As stated by the Kerberos SPNEGO
* specification, it should be <code>HTTP/${HOSTNAME}@{REALM}</code>. The realm can be omitted from the
* principal as the JDK GSS libraries will use the realm name of the configured default realm.
* It does not have a default value.</li>
* <li>kerberos.keytab: the keytab file containing the credentials for the Kerberos principal.
* It does not have a default value.</li>
* </ul>
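 * <p/>
 * For example (the values are illustrative only):
 * <pre>
 *   kerberos.principal = HTTP/localhost@LOCALHOST
 *   kerberos.keytab    = /tmp/alfredo.keytab
 * </pre>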
*/
public class KerberosAuthenticationHandler implements AuthenticationHandler {
private static Logger LOG = LoggerFactory.getLogger(KerberosAuthenticationHandler.class);
/**
* Kerberos context configuration for the JDK GSS library.
*/
private static class KerberosConfiguration extends Configuration {
private String keytab;
private String principal;
public KerberosConfiguration(String keytab, String principal) {
this.keytab = keytab;
this.principal = principal;
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", "false");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
if (LOG.isDebugEnabled()) {
options.put("debug", "true");
}
return new AppConfigurationEntry[]{
new AppConfigurationEntry(Krb5LoginModule.class.getName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options),};
}
}
/**
* Constant that identifies the authentication mechanism.
*/
public static final String TYPE = "kerberos";
/**
* Constant for the configuration property that indicates the kerberos principal.
*/
public static final String PRINCIPAL = TYPE + ".principal";
/**
* Constant for the configuration property that indicates the keytab file path.
*/
public static final String KEYTAB = TYPE + ".keytab";
/**
* Constant for the configuration property that indicates the Kerberos name
* rules for the Kerberos principals.
*/
public static final String NAME_RULES = TYPE + ".name.rules";
private String principal;
private String keytab;
private GSSManager gssManager;
private LoginContext loginContext;
/**
* Initializes the authentication handler instance.
* <p/>
* It creates a Kerberos context using the principal and keytab specified in the configuration.
* <p/>
* This method is invoked by the {@link AuthenticationFilter#init} method.
*
* @param config configuration properties to initialize the handler.
*
* @throws ServletException thrown if the handler could not be initialized.
*/
@Override
public void init(Properties config) throws ServletException {
try {
principal = config.getProperty(PRINCIPAL, principal);
if (principal == null || principal.trim().length() == 0) {
throw new ServletException("Principal not defined in configuration");
}
keytab = config.getProperty(KEYTAB, keytab);
if (keytab == null || keytab.trim().length() == 0) {
throw new ServletException("Keytab not defined in configuration");
}
if (!new File(keytab).exists()) {
throw new ServletException("Keytab does not exist: " + keytab);
}
String nameRules = config.getProperty(NAME_RULES, "DEFAULT");
KerberosName.setRules(nameRules);
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(principal));
Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
KerberosConfiguration kerberosConfiguration = new KerberosConfiguration(keytab, principal);
loginContext = new LoginContext("", subject, null, kerberosConfiguration);
loginContext.login();
Subject serverSubject = loginContext.getSubject();
try {
gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {
@Override
public GSSManager run() throws Exception {
return GSSManager.getInstance();
}
});
} catch (PrivilegedActionException ex) {
throw ex.getException();
}
LOG.info("Initialized, principal [{}] from keytab [{}]", principal, keytab);
} catch (Exception ex) {
throw new ServletException(ex);
}
}
/**
* Releases any resources initialized by the authentication handler.
* <p/>
* It destroys the Kerberos context.
*/
@Override
public void destroy() {
try {
if (loginContext != null) {
loginContext.logout();
loginContext = null;
}
} catch (LoginException ex) {
LOG.warn(ex.getMessage(), ex);
}
}
/**
* Returns the authentication type of the authentication handler, 'kerberos'.
* <p/>
*
* @return the authentication type of the authentication handler, 'kerberos'.
*/
@Override
public String getType() {
return TYPE;
}
/**
* Returns the Kerberos principal used by the authentication handler.
*
* @return the Kerberos principal used by the authentication handler.
*/
protected String getPrincipal() {
return principal;
}
/**
* Returns the keytab used by the authentication handler.
*
* @return the keytab used by the authentication handler.
*/
protected String getKeytab() {
return keytab;
}
/**
   * It enforces the Kerberos SPNEGO authentication sequence, returning an {@link AuthenticationToken} only
* after the Kerberos SPNEGO sequence has completed successfully.
* <p/>
*
* @param request the HTTP client request.
* @param response the HTTP client response.
*
* @return an authentication token if the Kerberos SPNEGO sequence is complete and valid,
* <code>null</code> if it is in progress (in this case the handler handles the response to the client).
*
* @throws IOException thrown if an IO error occurred.
* @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
*/
@Override
public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
throws IOException, AuthenticationException {
AuthenticationToken token = null;
String authorization = request.getHeader(KerberosAuthenticator.AUTHORIZATION);
if (authorization == null || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
if (authorization == null) {
LOG.trace("SPNEGO starting");
} else {
LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION + "' does not start with '" +
KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
}
} else {
authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
final Base64 base64 = new Base64(0);
final byte[] clientToken = base64.decode(authorization);
Subject serverSubject = loginContext.getSubject();
try {
token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
@Override
public AuthenticationToken run() throws Exception {
AuthenticationToken token = null;
GSSContext gssContext = null;
try {
gssContext = gssManager.createContext((GSSCredential) null);
byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
if (serverToken != null && serverToken.length > 0) {
String authenticate = base64.encodeToString(serverToken);
response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
KerberosAuthenticator.NEGOTIATE + " " + authenticate);
}
if (!gssContext.isEstablished()) {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
LOG.trace("SPNEGO in progress");
} else {
String clientPrincipal = gssContext.getSrcName().toString();
KerberosName kerberosName = new KerberosName(clientPrincipal);
String userName = kerberosName.getShortName();
token = new AuthenticationToken(userName, clientPrincipal, TYPE);
response.setStatus(HttpServletResponse.SC_OK);
LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
}
} finally {
if (gssContext != null) {
gssContext.dispose();
}
}
return token;
}
});
} catch (PrivilegedActionException ex) {
if (ex.getException() instanceof IOException) {
throw (IOException) ex.getException();
}
else {
throw new AuthenticationException(ex.getException());
}
}
}
return token;
}
}

View File

@ -0,0 +1,134 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.client.PseudoAuthenticator;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Properties;
/**
* The <code>PseudoAuthenticationHandler</code> provides a pseudo authentication mechanism that accepts
* the user name specified as a query string parameter.
* <p/>
 * This mimics the model of Hadoop Simple authentication which trusts the 'user.name' property provided in
* the configuration object.
* <p/>
* This handler can be configured to support anonymous users.
* <p/>
* The only supported configuration property is:
* <ul>
* <li>simple.anonymous.allowed: <code>true|false</code>, default value is <code>false</code></li>
* </ul>
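 * <p/>
 * For example (hypothetical URL), a request to
 * <code>http://localhost:8080/webapp/resource?user.name=babu</code> is authenticated as user 'babu',
 * while the same request without the <code>user.name</code> parameter is either accepted as anonymous
 * or rejected, depending on the <code>simple.anonymous.allowed</code> setting.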
*/
public class PseudoAuthenticationHandler implements AuthenticationHandler {
/**
* Constant that identifies the authentication mechanism.
*/
public static final String TYPE = "simple";
/**
* Constant for the configuration property that indicates if anonymous users are allowed.
*/
public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
private boolean acceptAnonymous;
/**
* Initializes the authentication handler instance.
* <p/>
* This method is invoked by the {@link AuthenticationFilter#init} method.
*
* @param config configuration properties to initialize the handler.
*
* @throws ServletException thrown if the handler could not be initialized.
*/
@Override
public void init(Properties config) throws ServletException {
acceptAnonymous = Boolean.parseBoolean(config.getProperty(ANONYMOUS_ALLOWED, "false"));
}
/**
* Returns if the handler is configured to support anonymous users.
*
* @return if the handler is configured to support anonymous users.
*/
protected boolean getAcceptAnonymous() {
return acceptAnonymous;
}
/**
* Releases any resources initialized by the authentication handler.
* <p/>
* This implementation does a NOP.
*/
@Override
public void destroy() {
}
/**
* Returns the authentication type of the authentication handler, 'simple'.
* <p/>
*
* @return the authentication type of the authentication handler, 'simple'.
*/
@Override
public String getType() {
return TYPE;
}
/**
* Authenticates an HTTP client request.
* <p/>
* It extracts the {@link PseudoAuthenticator#USER_NAME} parameter from the query string and creates
* an {@link AuthenticationToken} with it.
* <p/>
* If the HTTP client request does not contain the {@link PseudoAuthenticator#USER_NAME} parameter and
* the handler is configured to allow anonymous users it returns the {@link AuthenticationToken#ANONYMOUS}
* token.
* <p/>
* If the HTTP client request does not contain the {@link PseudoAuthenticator#USER_NAME} parameter and
* the handler is configured to disallow anonymous users it throws an {@link AuthenticationException}.
*
* @param request the HTTP client request.
* @param response the HTTP client response.
*
* @return an authentication token if the HTTP client request is accepted and credentials are valid.
*
* @throws IOException thrown if an IO error occurred.
* @throws AuthenticationException thrown if HTTP client request was not accepted as an authentication request.
*/
@Override
public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse response)
throws IOException, AuthenticationException {
AuthenticationToken token;
String userName = request.getParameter(PseudoAuthenticator.USER_NAME);
if (userName == null) {
if (getAcceptAnonymous()) {
token = AuthenticationToken.ANONYMOUS;
} else {
throw new AuthenticationException("Anonymous requests are disallowed");
}
} else {
token = new AuthenticationToken(userName, userName, TYPE);
}
return token;
}
}

View File

@ -1,3 +1,5 @@
package org.apache.hadoop.alfredo.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -16,8 +18,6 @@
* limitations under the License.
*/
package org.apache.hadoop.security;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@ -26,13 +26,12 @@ import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import sun.security.krb5.Config;
import sun.security.krb5.KrbException;
/**
* This class implements parsing and handling of Kerberos principal names. In
* This class implements parsing and handling of Kerberos principal names. In
* particular, it splits them apart and translates them down into local
* operating system names.
*/
@ -50,14 +49,14 @@ public class KerberosName {
/**
* A pattern that matches a Kerberos name with at most 2 components.
*/
private static final Pattern nameParser =
private static final Pattern nameParser =
Pattern.compile("([^/@]*)(/([^/@]*))?@([^/@]*)");
/**
/**
* A pattern that matches a string with out '$' and then a single
* parameter with $n.
*/
private static Pattern parameterPattern =
private static Pattern parameterPattern =
Pattern.compile("([^$]*)(\\$(\\d*))?");
/**
@ -66,12 +65,12 @@ public class KerberosName {
private static final Pattern ruleParser =
Pattern.compile("\\s*((DEFAULT)|(RULE:\\[(\\d*):([^\\]]*)](\\(([^)]*)\\))?"+
"(s/([^/]*)/([^/]*)/(g)?)?))");
/**
* A pattern that recognizes simple/non-simple names.
*/
private static final Pattern nonSimplePattern = Pattern.compile("[/@]");
/**
* The list of translation rules.
*/
@ -79,15 +78,12 @@ public class KerberosName {
private static String defaultRealm;
private static Config kerbConf;
static {
try {
kerbConf = Config.getInstance();
defaultRealm = kerbConf.getDefaultRealm();
} catch (KrbException ke) {
if(UserGroupInformation.isSecurityEnabled())
throw new IllegalArgumentException("Can't get Kerberos configuration",ke);
else
defaultRealm="";
}
}
@ -154,7 +150,7 @@ public class KerberosName {
public String getHostName() {
return hostName;
}
/**
* Get the realm of the name.
* @return the realm of the name, may be null
@ -162,7 +158,7 @@ public class KerberosName {
public String getRealm() {
return realm;
}
/**
* An encoding of a rule for translating kerberos names.
*/
@ -191,12 +187,12 @@ public class KerberosName {
this.numOfComponents = numOfComponents;
this.format = format;
this.match = match == null ? null : Pattern.compile(match);
this.fromPattern =
this.fromPattern =
fromPattern == null ? null : Pattern.compile(fromPattern);
this.toPattern = toPattern;
this.repeat = repeat;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder();
@ -226,9 +222,9 @@ public class KerberosName {
}
return buf.toString();
}
/**
* Replace the numbered parameters of the form $n where n is from 1 to
* Replace the numbered parameters of the form $n where n is from 1 to
* the length of params. Normal text is copied directly and $n is replaced
* by the corresponding parameter.
* @param format the string to replace parameters again
@ -236,7 +232,7 @@ public class KerberosName {
* @return the generated string with the parameter references replaced.
* @throws BadFormatString
*/
static String replaceParameters(String format,
static String replaceParameters(String format,
String[] params) throws BadFormatString {
Matcher match = parameterPattern.matcher(format);
int start = 0;
@ -254,10 +250,10 @@ public class KerberosName {
}
result.append(params[num]);
} catch (NumberFormatException nfe) {
throw new BadFormatString("bad format in username mapping in " +
throw new BadFormatString("bad format in username mapping in " +
paramNum, nfe);
}
}
start = match.end();
}
@ -273,7 +269,7 @@ public class KerberosName {
* @param repeat whether the substitution should be repeated
* @return
*/
static String replaceSubstitution(String base, Pattern from, String to,
static String replaceSubstitution(String base, Pattern from, String to,
boolean repeat) {
Matcher match = from.matcher(base);
if (repeat) {
@ -338,16 +334,6 @@ public class KerberosName {
return result;
}
/**
* Set the static configuration to get the rules.
* @param conf the new configuration
* @throws IOException
*/
public static void setConfiguration(Configuration conf) throws IOException {
String ruleString = conf.get("hadoop.security.auth_to_local", "DEFAULT");
rules = parseRules(ruleString);
}
@SuppressWarnings("serial")
public static class BadFormatString extends IOException {
BadFormatString(String msg) {
@ -391,6 +377,14 @@ public class KerberosName {
throw new NoMatchingRule("No rules applied to " + toString());
}
/**
* Set the rules.
* @param ruleString the rules string.
*/
public static void setRules(String ruleString) {
rules = parseRules(ruleString);
}
static void printRules() throws IOException {
int i = 0;
for(Rule r: rules) {
@ -398,11 +392,4 @@ public class KerberosName {
}
}
public static void main(String[] args) throws Exception {
setConfiguration(new Configuration());
for(String arg: args) {
KerberosName name = new KerberosName(arg);
System.out.println("Name: " + name + " to " + name.getShortName());
}
}
}

View File

@ -0,0 +1,100 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.util;
import org.apache.commons.codec.binary.Base64;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* Signs strings and verifies signed strings using a SHA digest.
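 * <p/>
 * A minimal usage sketch (the secret and the signed payload are illustrative):
 * <pre>
 *   Signer signer = new Signer("my secret".getBytes());
 *   String signed = signer.sign("u=foo");
 *   String original = signer.verifyAndExtract(signed); // throws SignerException if tampered with
 * </pre>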
*/
public class Signer {
private static final String SIGNATURE = "&s=";
private byte[] secret;
/**
* Creates a Signer instance using the specified secret.
*
* @param secret secret to use for creating the digest.
*/
public Signer(byte[] secret) {
if (secret == null) {
throw new IllegalArgumentException("secret cannot be NULL");
}
this.secret = secret.clone();
}
/**
* Returns a signed string.
* <p/>
* The signature '&s=SIGNATURE' is appended at the end of the string.
*
* @param str string to sign.
*
* @return the signed string.
*/
public String sign(String str) {
if (str == null || str.length() == 0) {
throw new IllegalArgumentException("NULL or empty string to sign");
}
String signature = computeSignature(str);
return str + SIGNATURE + signature;
}
/**
* Verifies a signed string and extracts the original string.
*
* @param signedStr the signed string to verify and extract.
*
* @return the extracted original string.
*
* @throws SignerException thrown if the given string is not a signed string or if the signature is invalid.
*/
public String verifyAndExtract(String signedStr) throws SignerException {
int index = signedStr.lastIndexOf(SIGNATURE);
if (index == -1) {
throw new SignerException("Invalid signed text: " + signedStr);
}
String originalSignature = signedStr.substring(index + SIGNATURE.length());
String rawValue = signedStr.substring(0, index);
String currentSignature = computeSignature(rawValue);
if (!originalSignature.equals(currentSignature)) {
throw new SignerException("Invalid signature");
}
return rawValue;
}
/**
   * Returns the signature of a string.
*
* @param str string to sign.
*
* @return the signature for the string.
*/
protected String computeSignature(String str) {
try {
MessageDigest md = MessageDigest.getInstance("SHA");
md.update(str.getBytes());
md.update(secret);
byte[] digest = md.digest();
return new Base64(0).encodeToString(digest);
} catch (NoSuchAlgorithmException ex) {
throw new RuntimeException("It should not happen, " + ex.getMessage(), ex);
}
}
}

View File

@ -0,0 +1,31 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.util;
/**
* Exception thrown by {@link Signer} when a string signature is invalid.
*/
public class SignerException extends Exception {
static final long serialVersionUID = 0;
/**
* Creates an exception instance.
*
* @param msg message for the exception.
*/
public SignerException(String msg) {
super(msg);
}
}

View File

@ -0,0 +1,75 @@
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License. See accompanying LICENSE file.
---
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Building It
---
---
${maven.build.timestamp}
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Building It
\[ {{{./index.html}Go Back}} \]
* Requirements
* Java 6+
* Maven 3+
* Kerberos KDC (for running Kerberos test cases)
* Building
Use Maven goals: clean, test, compile, package, install
Available profiles: docs, testKerberos
* Testing
By default Kerberos testcases are not run.
The requirements to run the Kerberos testcases are a running KDC, a keytab
file with a client principal and a Kerberos server principal.
To run the Kerberos testcases use the <<<testKerberos>>> Maven profile:
+---+
$ mvn test -PtestKerberos
+---+
The following Maven <<<-D>>> options can be used to change the default
values:
* <<<alfredo.test.kerberos.realm>>>: default value <<LOCALHOST>>
* <<<alfredo.test.kerberos.client.principal>>>: default value <<client>>
* <<<alfredo.test.kerberos.server.principal>>>: default value
<<HTTP/localhost>> (it must start with 'HTTP/')
* <<<alfredo.test.kerberos.keytab.file>>>: default value
<<${HOME}/${USER}.keytab>>
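For example, to run the Kerberos testcases overriding all of these values on the
command line (shown here with their documented default values):
+---+
$ mvn test -PtestKerberos -Dalfredo.test.kerberos.realm=LOCALHOST \
    -Dalfredo.test.kerberos.client.principal=client \
    -Dalfredo.test.kerberos.server.principal=HTTP/localhost \
    -Dalfredo.test.kerberos.keytab.file=${HOME}/${USER}.keytab
+---+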
** Generating Documentation
To create the documentation use the <<<docs>>> Maven profile:
+---+
$ mvn package -Pdocs
+---+
The generated documentation is available at
<<<hadoop-alfredo/target/site/>>>.
\[ {{{./index.html}Go Back}} \]

View File

@ -0,0 +1,181 @@
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License. See accompanying LICENSE file.
---
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Server Side
Configuration
---
---
${maven.build.timestamp}
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Server Side
Configuration
\[ {{{./index.html}Go Back}} \]
* Server Side Configuration Setup
The {{{./apidocs/org/apache/hadoop/alfredo/server/AuthenticationFilter.html}
AuthenticationFilter filter}} is Alfredo's server side component.
This filter must be configured in front of all the web application resources
that require authenticated requests.
The Alfredo and dependent JAR files must be in the web application classpath
(commonly the <<<WEB-INF/lib>>> directory).
Alfredo uses the SLF4J API for logging. The Alfredo Maven POM defines the
SLF4J API dependency but it does not define the dependency on a concrete
logging implementation; this must be added explicitly to the web
application. For example, if the web application uses Log4j, the
SLF4J-LOG4J12 and LOG4J jar files must be part of the web application
classpath, as well as the Log4j configuration file.
** Common Configuration parameters
* <<<config.prefix>>>: If specified, all other configuration parameter names
must start with the prefix. The default value is no prefix.
* <<<[PREFIX.]type>>>: the authentication type keyword (<<<simple>>> or
<<<kerberos>>>) or a
{{{./apidocs/org/apache/hadoop/alfredo/server/AuthenticationHandler.html}
Authentication handler implementation}}.
* <<<[PREFIX.]signature.secret>>>: The secret used to SHA-sign the generated
authentication tokens. If a secret is not provided, a random secret is
generated at startup time. If using multiple web application instances
behind a load-balancer, a secret must be set for the application to work
properly.
* <<<[PREFIX.]token.validity>>>: The validity -in seconds- of the generated
authentication token. The default value is <<<3600>>> seconds.
* <<<[PREFIX.]cookie.domain>>>: domain to use for the HTTP cookie that stores
the authentication token.
* <<<[PREFIX.]cookie.path>>>: path to use for the HTTP cookie that stores the
authentication token.
** Kerberos Configuration
<<IMPORTANT>>: A KDC must be configured and running.
To use Kerberos SPNEGO as the authentication mechanism, the authentication
filter must be configured with the following init parameters:
* <<<[PREFIX.]type>>>: the keyword <<<kerberos>>>.
* <<<[PREFIX.]kerberos.principal>>>: The web-application Kerberos principal
name. The Kerberos principal name must start with <<<HTTP/...>>>. For
example: <<<HTTP/localhost@LOCALHOST>>>. There is no default value.
* <<<[PREFIX.]kerberos.keytab>>>: The path to the keytab file containing
the credentials for the kerberos principal. For example:
<<</Users/tucu/alfredo.keytab>>>. There is no default value.
<<Example>>:
+---+
<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
...
<filter>
<filter-name>kerberosFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
<init-param>
<param-name>type</param-name>
<param-value>kerberos</param-value>
</init-param>
<init-param>
<param-name>token.validity</param-name>
<param-value>30</param-value>
</init-param>
<init-param>
<param-name>cookie.domain</param-name>
<param-value>.foo.com</param-value>
</init-param>
<init-param>
<param-name>cookie.path</param-name>
<param-value>/</param-value>
</init-param>
<init-param>
<param-name>kerberos.principal</param-name>
<param-value>HTTP/localhost@LOCALHOST</param-value>
</init-param>
<init-param>
<param-name>kerberos.keytab</param-name>
<param-value>/tmp/alfredo.keytab</param-value>
</init-param>
</filter>
<filter-mapping>
<filter-name>kerberosFilter</filter-name>
<url-pattern>/kerberos/*</url-pattern>
</filter-mapping>
...
</web-app>
+---+
** Pseudo/Simple Configuration
To use Pseudo/Simple as the authentication mechanism (trusting the value of
the query string parameter 'user.name'), the authentication filter must be
configured with the following init parameters:
* <<<[PREFIX.]type>>>: the keyword <<<simple>>>.
* <<<[PREFIX.]simple.anonymous.allowed>>>: a boolean parameter that
indicates if anonymous requests are allowed or not. The default value is
<<<false>>>.
<<Example>>:
+---+
<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
...
<filter>
<filter-name>simpleFilter</filter-name>
<filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
<init-param>
<param-name>type</param-name>
<param-value>simple</param-value>
</init-param>
<init-param>
<param-name>token.validity</param-name>
<param-value>30</param-value>
</init-param>
<init-param>
<param-name>cookie.domain</param-name>
<param-value>.foo.com</param-value>
</init-param>
<init-param>
<param-name>cookie.path</param-name>
<param-value>/</param-value>
</init-param>
<init-param>
<param-name>simple.anonymous.allowed</param-name>
<param-value>false</param-value>
</init-param>
</filter>
<filter-mapping>
<filter-name>simpleFilter</filter-name>
<url-pattern>/simple/*</url-pattern>
</filter-mapping>
...
</web-app>
+---+
\[ {{{./index.html}Go Back}} \]
@@ -0,0 +1,137 @@
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License. See accompanying LICENSE file.
---
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Examples
---
---
${maven.build.timestamp}
Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Examples
\[ {{{./index.html}Go Back}} \]
* Accessing an Alfredo protected URL Using a browser
<<IMPORTANT:>> The browser must support HTTP Kerberos SPNEGO. For example,
Firefox or Internet Explorer.
For Firefox, access the low-level configuration page by loading the
<<<about:config>>> page. Then go to the
<<<network.negotiate-auth.trusted-uris>>> preference and add the hostname or
the domain of the web server that is HTTP Kerberos SPNEGO protected (if using
multiple domains and hostnames, separate them with commas).
* Accessing an Alfredo protected URL Using <<<curl>>>
<<IMPORTANT:>> The <<<curl>>> version must support GSS; to verify, run <<<curl -V>>>.
+---+
$ curl -V
curl 7.19.7 (universal-apple-darwin10.0) libcurl/7.19.7 OpenSSL/0.9.8l zlib/1.2.3
Protocols: tftp ftp telnet dict ldap http file https ftps
Features: GSS-Negotiate IPv6 Largefile NTLM SSL libz
+---+
Log in to the KDC using <<<kinit>>> and then use <<<curl>>> to fetch the
protected URL:
+---+
$ kinit
Please enter the password for tucu@LOCALHOST:
$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/alfredo-examples/kerberos/who
Enter host password for user 'tucu':
Hello Alfredo!
+---+
* The <<<--negotiate>>> option enables SPNEGO in <<<curl>>>.
* The <<<-u foo>>> option is required but the user is ignored (the principal
that has been kinit-ed is used).
* The <<<-b>>> and <<<-c>>> options are used to store and send HTTP cookies.
* Using the Java Client
Use the <<<AuthenticatedURL>>> class to obtain an authenticated HTTP
connection:
+---+
...
URL url = new URL("http://localhost:8080/alfredo/kerberos/who");
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
...
HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
...
conn = new AuthenticatedURL().openConnection(url, token);
...
+---+
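For reference, a fuller, self-contained sketch of the client usage (assuming
the example web application is deployed under the <<<alfredo-examples>>>
context used elsewhere in this document; the class name <<<WhoClient>>> is
made up):
+---+
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.alfredo.client.AuthenticatedURL;

public class WhoClient {

  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/alfredo-examples/kerberos/who");

    // the token is populated by the first authenticated request and can be
    // reused for subsequent requests until it expires
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();

    HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
    BufferedReader reader =
      new BufferedReader(new InputStreamReader(conn.getInputStream()));
    String line = reader.readLine();
    while (line != null) {
      System.out.println(line);
      line = reader.readLine();
    }
    reader.close();

    // second request: the signed authentication cookie carried by the token
    // is sent back, so no new authentication round trip is needed
    conn = new AuthenticatedURL().openConnection(url, token);
    System.out.println("Status code: " + conn.getResponseCode());
  }
}
+---+
As with the <<<curl>>> example, a Kerberos ticket must have been obtained
with <<<kinit>>> before running it.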
* Building and Running the Examples
Download Alfredo's source code; the examples are in the
<<<src/main/examples>>> directory.
** Server Example:
Edit the <<<src/main/examples/src/main/webapp/WEB-INF/web.xml>>> file and set
the appropriate configuration init parameters for the <<<AuthenticationFilter>>>
definition configured for Kerberos (the correct Kerberos principal and keytab
file must be specified). Refer to the {{{./Configuration.html}Configuration
document}} for details.
Create the web application WAR file by running the <<<mvn package>>> command.
Deploy the WAR file in a servlet container. For example, if using Tomcat,
copy the WAR file to Tomcat's <<<webapps/>>> directory.
Start the servlet container.
** Accessing the server using <<<curl>>>
Try accessing protected resources using <<<curl>>>. The protected resources
are:
+---+
$ kinit
Please enter the password for tucu@LOCALHOST:
$ curl http://localhost:8080/alfredo-examples/anonymous/who
$ curl http://localhost:8080/alfredo-examples/simple/who?user.name=foo
$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/alfredo-examples/kerberos/who
+---+
** Accessing the server using the Java client example
+---+
$ kinit
Please enter the password for tucu@LOCALHOST:
$ cd examples
$ mvn exec:java -Durl=http://localhost:8080/alfredo-examples/kerberos/who
....
Token value: "u=tucu,p=tucu@LOCALHOST,t=kerberos,e=1295305313146,s=sVZ1mpSnC5TKhZQE3QLN5p2DWBo="
Status code: 200 OK
You are: user[tucu] principal[tucu@LOCALHOST]
....
+---+
\[ {{{./index.html}Go Back}} \]
@@ -0,0 +1,53 @@
~~ Licensed under the Apache License, Version 2.0 (the "License");
~~ you may not use this file except in compliance with the License.
~~ You may obtain a copy of the License at
~~
~~ http://www.apache.org/licenses/LICENSE-2.0
~~
~~ Unless required by applicable law or agreed to in writing, software
~~ distributed under the License is distributed on an "AS IS" BASIS,
~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~~ See the License for the specific language governing permissions and
~~ limitations under the License. See accompanying LICENSE file.
---
Hadoop Alfredo, Java HTTP SPNEGO ${project.version}
---
---
${maven.build.timestamp}
Hadoop Alfredo, Java HTTP SPNEGO ${project.version}
Hadoop Alfredo is a Java library consisting of client and server
components that enable Kerberos SPNEGO authentication for HTTP.
Alfredo also supports additional authentication mechanisms on the client
and the server side via two simple interfaces.
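The two extension points are the client-side <<<Authenticator>>> interface
and the server-side <<<AuthenticationHandler>>> interface. As an
illustration, a minimal custom server-side handler, modeled on the dummy
handler used by the unit tests (the class name and the header-based scheme
are made up), could look like this:
+---+
import java.io.IOException;
import java.util.Properties;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.server.AuthenticationHandler;
import org.apache.hadoop.alfredo.server.AuthenticationToken;

public class HeaderAuthenticationHandler implements AuthenticationHandler {

  @Override
  public void init(Properties config) throws ServletException {
    // read handler-specific configuration here
  }

  @Override
  public void destroy() {
  }

  @Override
  public String getType() {
    return "header";
  }

  @Override
  public AuthenticationToken authenticate(HttpServletRequest request,
                                          HttpServletResponse response)
    throws IOException, AuthenticationException {
    // hypothetical scheme: trust a custom HTTP header set by a front end
    String user = request.getHeader("X-Authenticated-User");
    if (user == null) {
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      return null;
    }
    AuthenticationToken token = new AuthenticationToken(user, user, getType());
    token.setExpires(System.currentTimeMillis() + 3600 * 1000);
    return token;
  }
}
+---+
Such a handler is wired in by setting the filter's <<<type>>> init parameter
to its fully qualified class name (see the
{{{./Configuration.html}Configuration}} page).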
* License
Alfredo is distributed under {{{http://www.apache.org/licenses/}Apache
License 2.0}}.
* How Does Alfredo Work?
Alfredo enforces authentication on protected resources; once authentication
has been established, it sets a signed HTTP cookie that contains an
authentication token with the user name, user principal, authentication type
and expiration time.
Subsequent HTTP client requests presenting the signed HTTP cookie have access
to the protected resources until the cookie expires.
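For illustration only, the following sketch shows the round trip that the
filter performs internally, using the <<<Signer>>> and
<<<AuthenticationToken>>> classes that are also exercised by the unit tests
(the secret and the token values are made up; in the filter the secret comes
from the <<<signature.secret>>> configuration or is randomly generated):
+---+
import org.apache.hadoop.alfredo.server.AuthenticationToken;
import org.apache.hadoop.alfredo.util.Signer;

public class TokenRoundTrip {

  public static void main(String[] args) throws Exception {
    // hypothetical signing secret
    Signer signer = new Signer("secret".getBytes());

    // token carrying user name, principal, authentication type and expiration
    AuthenticationToken token =
      new AuthenticationToken("tucu", "tucu@LOCALHOST", "kerberos");
    token.setExpires(System.currentTimeMillis() + 3600 * 1000);

    // the signed string is what is stored as the value of the HTTP cookie
    String cookieValue = signer.sign(token.toString());

    // on later requests the signature is verified and the token parsed back;
    // the filter rejects tokens that have expired
    String verified = signer.verifyAndExtract(cookieValue);
    AuthenticationToken parsed = AuthenticationToken.parse(verified);
    System.out.println("user=" + parsed.getUserName()
      + " type=" + parsed.getType()
      + " expired=" + parsed.isExpired());
  }
}
+---+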
* User Documentation
* {{{./Examples.html}Examples}}
* {{{./Configuration.html}Configuration}}
* {{{./BuildingIt.html}Building It}}
* {{{./apidocs/index.html}JavaDocs}}
* {{{./dependencies.html}Dependencies}}
@@ -0,0 +1,34 @@
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project name="Hadoop Alfredo">
<version position="right"/>
<bannerLeft>
<name>&nbsp;</name>
</bannerLeft>
<skin>
<groupId>org.apache.maven.skins</groupId>
<artifactId>maven-stylus-skin</artifactId>
<version>1.1</version>
</skin>
<body>
<links>
<item name="Apache Hadoop" href="http://hadoop.apache.org/"/>
</links>
</body>
</project>
@@ -0,0 +1,129 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo;
import com.sun.security.auth.module.Krb5LoginModule;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import java.io.File;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
/**
* Test helper class for Java Kerberos setup.
*/
public class KerberosTestUtils {
private static final String PREFIX = "alfredo.test.";
public static final String REALM = PREFIX + "kerberos.realm";
public static final String CLIENT_PRINCIPAL = PREFIX + "kerberos.client.principal";
public static final String SERVER_PRINCIPAL = PREFIX + "kerberos.server.principal";
public static final String KEYTAB_FILE = PREFIX + "kerberos.keytab.file";
public static String getRealm() {
return System.getProperty(REALM, "LOCALHOST");
}
public static String getClientPrincipal() {
return System.getProperty(CLIENT_PRINCIPAL, "client") + "@" + getRealm();
}
public static String getServerPrincipal() {
return System.getProperty(SERVER_PRINCIPAL, "HTTP/localhost") + "@" + getRealm();
}
public static String getKeytabFile() {
String keytabFile =
new File(System.getProperty("user.home"), System.getProperty("user.name") + ".keytab").toString();
return System.getProperty(KEYTAB_FILE, keytabFile);
}
private static class KerberosConfiguration extends Configuration {
private String principal;
public KerberosConfiguration(String principal) {
this.principal = principal;
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("keyTab", KerberosTestUtils.getKeytabFile());
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", "true");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
options.put("debug", "true");
return new AppConfigurationEntry[]{
new AppConfigurationEntry(Krb5LoginModule.class.getName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options),};
}
}
public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
LoginContext loginContext = null;
try {
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
loginContext.login();
subject = loginContext.getSubject();
return Subject.doAs(subject, new PrivilegedExceptionAction<T>() {
@Override
public T run() throws Exception {
return callable.call();
}
});
} catch (PrivilegedActionException ex) {
throw ex.getException();
} finally {
if (loginContext != null) {
loginContext.logout();
}
}
}
public static <T> T doAsClient(Callable<T> callable) throws Exception {
return doAs(getClientPrincipal(), callable);
}
public static <T> T doAsServer(Callable<T> callable) throws Exception {
return doAs(getServerPrincipal(), callable);
}
}
@@ -0,0 +1,152 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import org.apache.hadoop.alfredo.server.AuthenticationFilter;
import junit.framework.TestCase;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.FilterHolder;
import org.mortbay.jetty.servlet.ServletHolder;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.ServerSocket;
import java.net.URL;
import java.util.Properties;
public abstract class AuthenticatorTestCase extends TestCase {
private Server server;
private String host = null;
private int port = -1;
Context context;
private static Properties authenticatorConfig;
protected static void setAuthenticationHandlerConfig(Properties config) {
authenticatorConfig = config;
}
public static class TestFilter extends AuthenticationFilter {
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
return authenticatorConfig;
}
}
public static class TestServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
resp.setStatus(HttpServletResponse.SC_OK);
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
InputStream is = req.getInputStream();
OutputStream os = resp.getOutputStream();
int c = is.read();
while (c > -1) {
os.write(c);
c = is.read();
}
is.close();
os.close();
resp.setStatus(HttpServletResponse.SC_OK);
}
}
protected void start() throws Exception {
server = new Server(0);
context = new Context();
context.setContextPath("/foo");
server.setHandler(context);
context.addFilter(new FilterHolder(TestFilter.class), "/*", 0);
context.addServlet(new ServletHolder(TestServlet.class), "/bar");
host = "localhost";
ServerSocket ss = new ServerSocket(0);
port = ss.getLocalPort();
ss.close();
server.getConnectors()[0].setHost(host);
server.getConnectors()[0].setPort(port);
server.start();
System.out.println("Running embedded servlet container at: http://" + host + ":" + port);
}
protected void stop() throws Exception {
try {
server.stop();
} catch (Exception e) {
}
try {
server.destroy();
} catch (Exception e) {
}
}
protected String getBaseURL() {
return "http://" + host + ":" + port + "/foo/bar";
}
private String POST = "test";
protected void _testAuthentication(Authenticator authenticator, boolean doPost) throws Exception {
start();
try {
URL url = new URL(getBaseURL());
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
AuthenticatedURL aUrl = new AuthenticatedURL(authenticator);
HttpURLConnection conn = aUrl.openConnection(url, token);
String tokenStr = token.toString();
if (doPost) {
conn.setRequestMethod("POST");
conn.setDoOutput(true);
}
conn.connect();
if (doPost) {
Writer writer = new OutputStreamWriter(conn.getOutputStream());
writer.write(POST);
writer.close();
}
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
if (doPost) {
BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
String echo = reader.readLine();
assertEquals(POST, echo);
assertNull(reader.readLine());
}
aUrl = new AuthenticatedURL();
conn = aUrl.openConnection(url, token);
conn.connect();
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
assertEquals(tokenStr, token.toString());
} finally {
stop();
}
}
}
@@ -0,0 +1,113 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import junit.framework.TestCase;
import org.mockito.Mockito;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TestAuthenticatedURL extends TestCase {
public void testToken() throws Exception {
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
assertFalse(token.isSet());
token = new AuthenticatedURL.Token("foo");
assertTrue(token.isSet());
assertEquals("foo", token.toString());
AuthenticatedURL.Token token1 = new AuthenticatedURL.Token();
AuthenticatedURL.Token token2 = new AuthenticatedURL.Token();
assertEquals(token1.hashCode(), token2.hashCode());
assertTrue(token1.equals(token2));
token1 = new AuthenticatedURL.Token();
token2 = new AuthenticatedURL.Token("foo");
assertNotSame(token1.hashCode(), token2.hashCode());
assertFalse(token1.equals(token2));
token1 = new AuthenticatedURL.Token("foo");
token2 = new AuthenticatedURL.Token();
assertNotSame(token1.hashCode(), token2.hashCode());
assertFalse(token1.equals(token2));
token1 = new AuthenticatedURL.Token("foo");
token2 = new AuthenticatedURL.Token("foo");
assertEquals(token1.hashCode(), token2.hashCode());
assertTrue(token1.equals(token2));
token1 = new AuthenticatedURL.Token("bar");
token2 = new AuthenticatedURL.Token("foo");
assertNotSame(token1.hashCode(), token2.hashCode());
assertFalse(token1.equals(token2));
token1 = new AuthenticatedURL.Token("foo");
token2 = new AuthenticatedURL.Token("bar");
assertNotSame(token1.hashCode(), token2.hashCode());
assertFalse(token1.equals(token2));
}
public void testInjectToken() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
token.set("foo");
AuthenticatedURL.injectToken(conn, token);
Mockito.verify(conn).addRequestProperty(Mockito.eq("Cookie"), Mockito.anyString());
}
public void testExtractTokenOK() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
String tokenStr = "foo";
Map<String, List<String>> headers = new HashMap<String, List<String>>();
List<String> cookies = new ArrayList<String>();
cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
headers.put("Set-Cookie", cookies);
Mockito.when(conn.getHeaderFields()).thenReturn(headers);
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
AuthenticatedURL.extractToken(conn, token);
assertEquals(tokenStr, token.toString());
}
public void testExtractTokenFail() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
String tokenStr = "foo";
Map<String, List<String>> headers = new HashMap<String, List<String>>();
List<String> cookies = new ArrayList<String>();
cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
headers.put("Set-Cookie", cookies);
Mockito.when(conn.getHeaderFields()).thenReturn(headers);
try {
AuthenticatedURL.extractToken(conn, new AuthenticatedURL.Token());
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
}
}
@@ -0,0 +1,83 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import org.apache.hadoop.alfredo.KerberosTestUtils;
import org.apache.hadoop.alfredo.server.AuthenticationFilter;
import org.apache.hadoop.alfredo.server.PseudoAuthenticationHandler;
import org.apache.hadoop.alfredo.server.KerberosAuthenticationHandler;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Properties;
import java.util.concurrent.Callable;
public class TestKerberosAuthenticator extends AuthenticatorTestCase {
private Properties getAuthenticationHandlerConfiguration() {
Properties props = new Properties();
props.setProperty(AuthenticationFilter.AUTH_TYPE, "kerberos");
props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, KerberosTestUtils.getServerPrincipal());
props.setProperty(KerberosAuthenticationHandler.KEYTAB, KerberosTestUtils.getKeytabFile());
props.setProperty(KerberosAuthenticationHandler.NAME_RULES,
"RULE:[1:$1@$0](.*@" + KerberosTestUtils.getRealm()+")s/@.*//\n");
return props;
}
public void testFallbacktoPseudoAuthenticator() throws Exception {
Properties props = new Properties();
props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
setAuthenticationHandlerConfig(props);
_testAuthentication(new KerberosAuthenticator(), false);
}
public void testNotAuthenticated() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
start();
try {
URL url = new URL(getBaseURL());
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.connect();
assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
assertTrue(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE) != null);
} finally {
stop();
}
}
public void testAuthentication() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
_testAuthentication(new KerberosAuthenticator(), false);
return null;
}
});
}
public void testAuthenticationPost() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
_testAuthentication(new KerberosAuthenticator(), true);
return null;
}
});
}
}
@@ -0,0 +1,83 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.client;
import org.apache.hadoop.alfredo.server.AuthenticationFilter;
import org.apache.hadoop.alfredo.server.PseudoAuthenticationHandler;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Properties;
public class TestPseudoAuthenticator extends AuthenticatorTestCase {
private Properties getAuthenticationHandlerConfiguration(boolean anonymousAllowed) {
Properties props = new Properties();
props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, Boolean.toString(anonymousAllowed));
return props;
}
public void testGetUserName() throws Exception {
PseudoAuthenticator authenticator = new PseudoAuthenticator();
assertEquals(System.getProperty("user.name"), authenticator.getUserName());
}
public void testAnonymousAllowed() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
start();
try {
URL url = new URL(getBaseURL());
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.connect();
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
} finally {
stop();
}
}
public void testAnonymousDisallowed() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
start();
try {
URL url = new URL(getBaseURL());
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.connect();
assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
} finally {
stop();
}
}
public void testAuthenticationAnonymousAllowed() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
_testAuthentication(new PseudoAuthenticator(), false);
}
public void testAuthenticationAnonymousDisallowed() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
_testAuthentication(new PseudoAuthenticator(), false);
}
public void testAuthenticationAnonymousAllowedWithPost() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
_testAuthentication(new PseudoAuthenticator(), true);
}
public void testAuthenticationAnonymousDisallowedWithPost() throws Exception {
setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
_testAuthentication(new PseudoAuthenticator(), true);
}
}
@@ -0,0 +1,611 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticatedURL;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.util.Signer;
import junit.framework.TestCase;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.Vector;
public class TestAuthenticationFilter extends TestCase {
public void testGetConfiguration() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("");
Mockito.when(config.getInitParameter("a")).thenReturn("A");
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector(Arrays.asList("a")).elements());
Properties props = filter.getConfiguration("", config);
assertEquals("A", props.getProperty("a"));
config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector(Arrays.asList("foo.a")).elements());
props = filter.getConfiguration("foo.", config);
assertEquals("A", props.getProperty("a"));
}
public void testInitEmpty() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector().elements());
filter.init(config);
fail();
} catch (ServletException ex) {
// Expected
} catch (Exception ex) {
fail();
} finally {
filter.destroy();
}
}
public static class DummyAuthenticationHandler implements AuthenticationHandler {
public static boolean init;
public static boolean destroy;
public static final String TYPE = "dummy";
public static void reset() {
init = false;
destroy = false;
}
@Override
public void init(Properties config) throws ServletException {
init = true;
}
@Override
public void destroy() {
destroy = true;
}
@Override
public String getType() {
return TYPE;
}
@Override
public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse response)
throws IOException, AuthenticationException {
AuthenticationToken token = null;
String param = request.getParameter("authenticated");
if (param != null && param.equals("true")) {
token = new AuthenticationToken("u", "p", "t");
token.setExpires(System.currentTimeMillis() + 1000);
} else {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
return token;
}
}
public void testInit() throws Exception {
// minimal configuration & simple auth handler (Pseudo)
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TOKEN_VALIDITY)).thenReturn("1000");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
filter.init(config);
assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
assertTrue(filter.isRandomSecret());
assertNull(filter.getCookieDomain());
assertNull(filter.getCookiePath());
assertEquals(1000, filter.getValidity());
} finally {
filter.destroy();
}
// custom secret
filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.SIGNATURE_SECRET)).elements());
filter.init(config);
assertFalse(filter.isRandomSecret());
} finally {
filter.destroy();
}
// custom cookie domain and cookie path
filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_DOMAIN)).thenReturn(".foo.com");
Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_PATH)).thenReturn("/bar");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.COOKIE_DOMAIN,
AuthenticationFilter.COOKIE_PATH)).elements());
filter.init(config);
assertEquals(".foo.com", filter.getCookieDomain());
assertEquals("/bar", filter.getCookiePath());
} finally {
filter.destroy();
}
// authentication handler lifecycle, and custom impl
DummyAuthenticationHandler.reset();
filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
assertTrue(DummyAuthenticationHandler.init);
} finally {
filter.destroy();
assertTrue(DummyAuthenticationHandler.destroy);
}
// kerberos auth handler
filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("kerberos");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
} catch (ServletException ex) {
// Expected
} finally {
assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
filter.destroy();
}
}
public void testGetRequestURL() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
Mockito.when(request.getQueryString()).thenReturn("a=A&b=B");
assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
} finally {
filter.destroy();
}
}
public void testGetToken() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.SIGNATURE_SECRET)).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
token.setExpires(System.currentTimeMillis() + 1000);
Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
AuthenticationToken newToken = filter.getToken(request);
assertEquals(token.toString(), newToken.toString());
} finally {
filter.destroy();
}
}
public void testGetTokenExpired() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.SIGNATURE_SECRET)).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
token.setExpires(System.currentTimeMillis() - 1000);
Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
try {
filter.getToken(request);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
} finally {
filter.destroy();
}
}
public void testGetTokenInvalidType() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.SIGNATURE_SECRET)).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
token.setExpires(System.currentTimeMillis() + 1000);
Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
try {
filter.getToken(request);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
} finally {
filter.destroy();
}
}
public void testDoFilterNotAuthenticated() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
fail();
return null;
}
}
).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());
filter.doFilter(request, response, chain);
Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
} finally {
filter.destroy();
}
}
private void _testDoFilterAuthentication(boolean withDomainPath) throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TOKEN_VALIDITY)).thenReturn("1000");
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.AUTH_TOKEN_VALIDITY,
AuthenticationFilter.SIGNATURE_SECRET)).elements());
if (withDomainPath) {
Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_DOMAIN)).thenReturn(".foo.com");
Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_PATH)).thenReturn("/bar");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.AUTH_TOKEN_VALIDITY,
AuthenticationFilter.SIGNATURE_SECRET,
AuthenticationFilter.COOKIE_DOMAIN,
AuthenticationFilter.COOKIE_PATH)).elements());
}
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getParameter("authenticated")).thenReturn("true");
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
Mockito.when(request.getQueryString()).thenReturn("authenticated=true");
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
final boolean[] calledDoFilter = new boolean[1];
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
calledDoFilter[0] = true;
return null;
}
}
).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());
final Cookie[] setCookie = new Cookie[1];
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
setCookie[0] = (Cookie) args[0];
return null;
}
}
).when(response).addCookie(Mockito.<Cookie>anyObject());
filter.doFilter(request, response, chain);
assertNotNull(setCookie[0]);
assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
assertTrue(setCookie[0].getValue().contains("u="));
assertTrue(setCookie[0].getValue().contains("p="));
assertTrue(setCookie[0].getValue().contains("t="));
assertTrue(setCookie[0].getValue().contains("e="));
assertTrue(setCookie[0].getValue().contains("s="));
assertTrue(calledDoFilter[0]);
Signer signer = new Signer("secret".getBytes());
String value = signer.verifyAndExtract(setCookie[0].getValue());
AuthenticationToken token = AuthenticationToken.parse(value);
assertEquals(System.currentTimeMillis() + 1000 * 1000, token.getExpires(), 100);
if (withDomainPath) {
assertEquals(".foo.com", setCookie[0].getDomain());
assertEquals("/bar", setCookie[0].getPath());
} else {
assertNull(setCookie[0].getDomain());
assertNull(setCookie[0].getPath());
}
} finally {
filter.destroy();
}
}
public void testDoFilterAuthentication() throws Exception {
_testDoFilterAuthentication(false);
}
public void testDoFilterAuthenticationWithDomainPath() throws Exception {
_testDoFilterAuthentication(true);
}
public void testDoFilterAuthenticated() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
AuthenticationToken token = new AuthenticationToken("u", "p", "t");
token.setExpires(System.currentTimeMillis() + 1000);
Signer signer = new Signer("alfredo".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
HttpServletRequest request = (HttpServletRequest) args[0];
assertEquals("u", request.getRemoteUser());
assertEquals("p", request.getUserPrincipal().getName());
return null;
}
}
).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());
filter.doFilter(request, response, chain);
} finally {
filter.destroy();
}
}
public void testDoFilterAuthenticatedExpired() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
token.setExpires(System.currentTimeMillis() - 1000);
Signer signer = new Signer("alfredo".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
fail();
return null;
}
}
).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());
final Cookie[] setCookie = new Cookie[1];
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
setCookie[0] = (Cookie) args[0];
return null;
}
}
).when(response).addCookie(Mockito.<Cookie>anyObject());
filter.doFilter(request, response, chain);
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
assertNotNull(setCookie[0]);
assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
assertEquals("", setCookie[0].getValue());
} finally {
filter.destroy();
}
}
public void testDoFilterAuthenticatedInvalidType() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
token.setExpires(System.currentTimeMillis() + 1000);
Signer signer = new Signer("alfredo".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
fail();
return null;
}
}
).when(chain).doFilter(Mockito.<ServletRequest>anyObject(), Mockito.<ServletResponse>anyObject());
final Cookie[] setCookie = new Cookie[1];
Mockito.doAnswer(
new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
setCookie[0] = (Cookie) args[0];
return null;
}
}
).when(response).addCookie(Mockito.<Cookie>anyObject());
filter.doFilter(request, response, chain);
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
assertNotNull(setCookie[0]);
assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
assertEquals("", setCookie[0].getValue());
} finally {
filter.destroy();
}
}
}
@@ -0,0 +1,124 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import junit.framework.TestCase;
public class TestAuthenticationToken extends TestCase {
public void testAnonymous() {
assertNotNull(AuthenticationToken.ANONYMOUS);
assertEquals(null, AuthenticationToken.ANONYMOUS.getUserName());
assertEquals(null, AuthenticationToken.ANONYMOUS.getName());
assertEquals(null, AuthenticationToken.ANONYMOUS.getType());
assertEquals(-1, AuthenticationToken.ANONYMOUS.getExpires());
assertFalse(AuthenticationToken.ANONYMOUS.isExpired());
}
public void testConstructor() throws Exception {
try {
new AuthenticationToken(null, "p", "t");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
new AuthenticationToken("", "p", "t");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
new AuthenticationToken("u", null, "t");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
new AuthenticationToken("u", "", "t");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
new AuthenticationToken("u", "p", null);
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
new AuthenticationToken("u", "p", "");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
new AuthenticationToken("u", "p", "t");
}
public void testGetters() throws Exception {
long expires = System.currentTimeMillis() + 50;
AuthenticationToken token = new AuthenticationToken("u", "p", "t");
token.setExpires(expires);
assertEquals("u", token.getUserName());
assertEquals("p", token.getName());
assertEquals("t", token.getType());
assertEquals(expires, token.getExpires());
assertFalse(token.isExpired());
Thread.sleep(51);
assertTrue(token.isExpired());
}
public void testToStringAndParse() throws Exception {
long expires = System.currentTimeMillis() + 50;
AuthenticationToken token = new AuthenticationToken("u", "p", "t");
token.setExpires(expires);
String str = token.toString();
token = AuthenticationToken.parse(str);
assertEquals("p", token.getName());
assertEquals("t", token.getType());
assertEquals(expires, token.getExpires());
assertFalse(token.isExpired());
Thread.sleep(51);
assertTrue(token.isExpired());
}
public void testParseInvalid() throws Exception {
long expires = System.currentTimeMillis() + 50;
AuthenticationToken token = new AuthenticationToken("u", "p", "t");
token.setExpires(expires);
String str = token.toString();
str = str.substring(0, str.indexOf("e="));
try {
AuthenticationToken.parse(str);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
}
}
@@ -0,0 +1,178 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.KerberosTestUtils;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import org.apache.hadoop.alfredo.client.KerberosAuthenticator;
import junit.framework.TestCase;
import org.apache.commons.codec.binary.Base64;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.mockito.Mockito;
import sun.security.jgss.GSSUtil;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Properties;
import java.util.concurrent.Callable;
public class TestKerberosAuthenticationHandler extends TestCase {
private KerberosAuthenticationHandler handler;
@Override
protected void setUp() throws Exception {
super.setUp();
handler = new KerberosAuthenticationHandler();
Properties props = new Properties();
props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, KerberosTestUtils.getServerPrincipal());
props.setProperty(KerberosAuthenticationHandler.KEYTAB, KerberosTestUtils.getKeytabFile());
props.setProperty(KerberosAuthenticationHandler.NAME_RULES,
"RULE:[1:$1@$0](.*@" + KerberosTestUtils.getRealm()+")s/@.*//\n");
try {
handler.init(props);
} catch (Exception ex) {
handler = null;
throw ex;
}
}
@Override
protected void tearDown() throws Exception {
if (handler != null) {
handler.destroy();
handler = null;
}
super.tearDown();
}
public void testInit() throws Exception {
assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
}
public void testType() throws Exception {
KerberosAuthenticationHandler handler = new KerberosAuthenticationHandler();
assertEquals(KerberosAuthenticationHandler.TYPE, handler.getType());
}
public void testRequestWithoutAuthorization() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
assertNull(handler.authenticate(request, response));
Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
public void testRequestWithInvalidAuthorization() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION)).thenReturn("invalid");
assertNull(handler.authenticate(request, response));
Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
public void testRequestWithIncompleteAuthorization() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION))
.thenReturn(KerberosAuthenticator.NEGOTIATE);
try {
handler.authenticate(request, response);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
}
public void testRequestWithAuthorization() throws Exception {
String token = KerberosTestUtils.doAsClient(new Callable<String>() {
@Override
public String call() throws Exception {
GSSManager gssManager = GSSManager.getInstance();
GSSContext gssContext = null;
try {
String servicePrincipal = KerberosTestUtils.getServerPrincipal();
GSSName serviceName = gssManager.createName(servicePrincipal, GSSUtil.NT_GSS_KRB5_PRINCIPAL);
gssContext = gssManager.createContext(serviceName, GSSUtil.GSS_KRB5_MECH_OID, null,
GSSContext.DEFAULT_LIFETIME);
gssContext.requestCredDeleg(true);
gssContext.requestMutualAuth(true);
byte[] inToken = new byte[0];
byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
Base64 base64 = new Base64(0);
return base64.encodeToString(outToken);
} finally {
if (gssContext != null) {
gssContext.dispose();
}
}
}
});
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION))
.thenReturn(KerberosAuthenticator.NEGOTIATE + " " + token);
AuthenticationToken authToken = handler.authenticate(request, response);
if (authToken != null) {
Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
assertEquals(KerberosAuthenticationHandler.TYPE, authToken.getType());
} else {
Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
}
public void testRequestWithInvalidKerberosAuthorization() throws Exception {
String token = new Base64(0).encodeToString(new byte[]{0, 1, 2});
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION)).thenReturn(
KerberosAuthenticator.NEGOTIATE + token);
try {
handler.authenticate(request, response);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
}
}
}
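
For reference, the SPNEGO handshake these tests drive looks roughly as follows from a plain HTTP client. This is a hedged sketch, not part of the patch: the endpoint URL and class name are hypothetical, and only the Negotiate / Authorization / WWW-Authenticate header usage mirrors the constants exercised in the tests above.

import java.net.HttpURLConnection;
import java.net.URL;

public class SpnegoClientSketch {
  public static void main(String[] args) throws Exception {
    // args[0]: base64 GSS token obtained the same way as in the test's doAsClient() callable
    String token = args[0];
    URL url = new URL("http://localhost:14000/protected");            // hypothetical protected endpoint
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Authorization", "Negotiate " + token);   // the header the handler parses
    System.out.println(conn.getResponseCode());                       // 200 if the token is accepted, 401 otherwise
    System.out.println(conn.getHeaderField("WWW-Authenticate"));      // mutual-auth token on success, or a fresh challenge
  }
}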

View File

@ -0,0 +1,113 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.server;
import org.apache.hadoop.alfredo.client.AuthenticationException;
import junit.framework.TestCase;
import org.apache.hadoop.alfredo.client.PseudoAuthenticator;
import org.mockito.Mockito;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Properties;
public class TestPseudoAuthenticationHandler extends TestCase {
public void testInit() throws Exception {
PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
try {
Properties props = new Properties();
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
handler.init(props);
assertEquals(false, handler.getAcceptAnonymous());
} finally {
handler.destroy();
}
}
public void testType() throws Exception {
PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
}
public void testAnonymousOn() throws Exception {
PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
try {
Properties props = new Properties();
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
handler.init(props);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
AuthenticationToken token = handler.authenticate(request, response);
assertEquals(AuthenticationToken.ANONYMOUS, token);
} finally {
handler.destroy();
}
}
public void testAnonymousOff() throws Exception {
PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
try {
Properties props = new Properties();
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
handler.init(props);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
handler.authenticate(request, response);
fail();
} catch (AuthenticationException ex) {
// Expected
} catch (Exception ex) {
fail();
} finally {
handler.destroy();
}
}
private void _testUserName(boolean anonymous) throws Exception {
PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
try {
Properties props = new Properties();
props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, Boolean.toString(anonymous));
handler.init(props);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
Mockito.when(request.getParameter(PseudoAuthenticator.USER_NAME)).thenReturn("user");
AuthenticationToken token = handler.authenticate(request, response);
assertNotNull(token);
assertEquals("user", token.getUserName());
assertEquals("user", token.getName());
assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
} finally {
handler.destroy();
}
}
public void testUserNameAnonymousOff() throws Exception {
_testUserName(false);
}
public void testUserNameAnonymousOn() throws Exception {
_testUserName(true);
}
}
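
The pseudo handler only inspects the PseudoAuthenticator.USER_NAME request parameter, so from a client the whole scheme reduces to a query parameter. A hedged sketch, assuming the conventional user.name parameter name and a hypothetical endpoint:

import java.net.HttpURLConnection;
import java.net.URL;

public class PseudoClientSketch {
  public static void main(String[] args) throws Exception {
    // With anonymous disallowed, omitting user.name makes authenticate() throw
    // AuthenticationException; with it, a token carrying the given user name is issued.
    URL url = new URL("http://localhost:14000/some/resource?user.name=alice"); // hypothetical endpoint
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    System.out.println(conn.getResponseCode());
  }
}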

View File

@ -1,3 +1,5 @@
package org.apache.hadoop.alfredo.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -16,11 +18,9 @@
* limitations under the License.
*/
package org.apache.hadoop.security;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.alfredo.KerberosTestUtils;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
@ -29,15 +29,13 @@ public class TestKerberosName {
@Before
public void setUp() throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.security.auth_to_local",
("RULE:[1:$1@$0](.*@YAHOO\\.COM)s/@.*//\n" +
"RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
"RULE:[2:$1;$2](^.*;admin$)s/;admin$//\n" +
"RULE:[2:$2](root)\n" +
"DEFAULT"));
conf.set("hadoop.security.authentication", "kerberos");
KerberosName.setConfiguration(conf);
String rules =
"RULE:[1:$1@$0](.*@YAHOO\\.COM)s/@.*//\n" +
"RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
"RULE:[2:$1;$2](^.*;admin$)s/;admin$//\n" +
"RULE:[2:$2](root)\n" +
"DEFAULT";
KerberosName.setRules(rules);
KerberosName.printRules();
}
@ -51,14 +49,14 @@ public class TestKerberosName {
@Test
public void testRules() throws Exception {
checkTranslation("omalley@APACHE.ORG", "omalley");
checkTranslation("hdfs/10.0.0.1@APACHE.ORG", "hdfs");
checkTranslation("omalley@" + KerberosTestUtils.getRealm(), "omalley");
checkTranslation("hdfs/10.0.0.1@" + KerberosTestUtils.getRealm(), "hdfs");
checkTranslation("oom@YAHOO.COM", "oom");
checkTranslation("johndoe/zoo@FOO.COM", "guest");
checkTranslation("joe/admin@FOO.COM", "joe");
checkTranslation("joe/root@FOO.COM", "root");
}
private void checkBadName(String name) {
System.out.println("Checking " + name + " to ensure it is bad.");
try {
@ -68,7 +66,7 @@ public class TestKerberosName {
// PASS
}
}
private void checkBadTranslation(String from) {
System.out.println("Checking bad translation for " + from);
KerberosName nm = new KerberosName(from);
@ -79,7 +77,7 @@ public class TestKerberosName {
// PASS
}
}
@Test
public void testAntiPatterns() throws Exception {
checkBadName("owen/owen/owen@FOO.COM");
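
checkTranslation() above presumably boils down to mapping a principal through the configured auth_to_local rules. A hedged sketch of that call pattern, assuming KerberosName now lives alongside this test in org.apache.hadoop.alfredo.util and that getShortName() is the mapping call:

import org.apache.hadoop.alfredo.util.KerberosName;

public class KerberosNameSketch {
  public static void main(String[] args) throws Exception {
    KerberosName.setRules(
        "RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
        "DEFAULT");
    KerberosName name = new KerberosName("johndoe/zoo@FOO.COM");
    System.out.println(name.getShortName()); // "guest" under the rule above, matching checkTranslation("johndoe/zoo@FOO.COM", "guest")
  }
}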

View File

@ -0,0 +1,93 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.alfredo.util;
import junit.framework.TestCase;
public class TestSigner extends TestCase {
public void testNoSecret() throws Exception {
try {
new Signer(null);
fail();
    } catch (IllegalArgumentException ex) {
      // Expected
    }
}
public void testNullAndEmptyString() throws Exception {
Signer signer = new Signer("secret".getBytes());
try {
signer.sign(null);
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
try {
signer.sign("");
fail();
} catch (IllegalArgumentException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
}
public void testSignature() throws Exception {
Signer signer = new Signer("secret".getBytes());
String s1 = signer.sign("ok");
String s2 = signer.sign("ok");
String s3 = signer.sign("wrong");
assertEquals(s1, s2);
assertNotSame(s1, s3);
}
public void testVerify() throws Exception {
Signer signer = new Signer("secret".getBytes());
String t = "test";
String s = signer.sign(t);
String e = signer.verifyAndExtract(s);
assertEquals(t, e);
}
public void testInvalidSignedText() throws Exception {
Signer signer = new Signer("secret".getBytes());
try {
signer.verifyAndExtract("test");
fail();
} catch (SignerException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
}
public void testTampering() throws Exception {
Signer signer = new Signer("secret".getBytes());
String t = "test";
String s = signer.sign(t);
s += "x";
try {
signer.verifyAndExtract(s);
fail();
} catch (SignerException ex) {
// Expected
} catch (Throwable ex) {
fail();
}
}
}
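
A hedged usage sketch of the Signer API as exercised above (package taken from the test's own package declaration): sign() appends a signature derived from the secret, and verifyAndExtract() either returns the original text or throws SignerException. The payload string is illustrative only.

import org.apache.hadoop.alfredo.util.Signer;
import org.apache.hadoop.alfredo.util.SignerException;

public class SignerSketch {
  public static void main(String[] args) throws Exception {
    Signer signer = new Signer("secret".getBytes());
    String signed = signer.sign("u=alice&t=simple");       // cookie-style payload, illustrative only
    System.out.println(signer.verifyAndExtract(signed));   // prints the original payload
    try {
      signer.verifyAndExtract(signed + "x");               // tampering breaks verification
    } catch (SignerException ex) {
      System.out.println("signature did not verify");
    }
  }
}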

View File

@ -0,0 +1,28 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[libdefaults]
default_realm = ${kerberos.realm}
udp_preference_limit = 1
extra_addresses = 127.0.0.1
[realms]
${kerberos.realm} = {
admin_server = localhost:88
kdc = localhost:88
}
[domain_realm]
localhost = ${kerberos.realm}
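
The ${kerberos.realm} token above is presumably substituted at build/test time (e.g. via Maven resource filtering), and the Kerberos-enabled tests then point the JVM at the resulting file. A hedged sketch using the stock JDK property; the file path is hypothetical:

public class Krb5ConfSketch {
  public static void main(String[] args) {
    // Standard JDK property selecting the krb5 configuration used by GSS/Kerberos.
    System.setProperty("java.security.krb5.conf", "/path/to/filtered/krb5.conf"); // hypothetical path
    System.out.println(System.getProperty("java.security.krb5.conf"));
  }
}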

View File

@ -2,6 +2,8 @@ Hadoop Change Log
Trunk (unreleased changes)
Release 0.23.0 - Unreleased
INCOMPATIBLE CHANGES
HADOOP-6904. Support method-based RPC compatibility. (hairong)
@ -63,6 +65,9 @@ Trunk (unreleased changes)
HADOOP-6385. dfs should support -rmdir (was HDFS-639). (Daryn Sharp
via mattf)
HADOOP-7119. add Kerberos HTTP SPNEGO authentication support to Hadoop
JT/NN/DN/TT web-consoles. (Alejandro Abdelnur via atm)
IMPROVEMENTS
HADOOP-7042. Updates to test-patch.sh to include failed test names and
@ -331,6 +336,17 @@ Trunk (unreleased changes)
HADOOP-7264. Bump avro version to at least 1.4.1. (Alejandro Abdelnur via
tomwhite)
HADOOP-7498. Remove legacy TAR layout creation. (Alejandro Abdelnur via
tomwhite)
HADOOP-7496. Break Maven TAR & bintar profiles into just LAYOUT & TAR proper.
(Alejandro Abdelnur via tomwhite)
HADOOP-7561. Make test-patch only run tests for changed modules. (tomwhite)
HADOOP-7547. Add generic type in WritableComparable subclasses.
(Uma Maheswara Rao G via szetszwo)
OPTIMIZATIONS
HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@ -428,9 +444,6 @@ Trunk (unreleased changes)
HADOOP-7389. Use of TestingGroups by tests causes subsequent tests to fail.
(atm via tomwhite)
HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd
via atm)
HADOOP-7377. Fix command name handling affecting DFSAdmin. (Daryn Sharp
via mattf)
@ -508,6 +521,18 @@ Trunk (unreleased changes)
HADOOP-7536. Correct the dependency version regressions introduced in
HADOOP-6671. (Alejandro Abdelnur via tomwhite)
HADOOP-7566. MR tests are failing webapps/hdfs not found in CLASSPATH.
(Alejandro Abdelnur via mahadev)
HADOOP-7567. 'mvn eclipse:eclipse' fails for hadoop-alfredo (auth).
(Alejandro Abdelnur via tomwhite)
HADOOP-7563. Set up HADOOP_HDFS_HOME, HADOOP_MAPRED_HOME and correct the
classpath. (Eric Yang via acmurthy)
HADOOP-7560. Change src layout to be hierarchical. (Alejandro Abdelnur
via acmurthy)
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES
@ -1017,6 +1042,9 @@ Release 0.22.0 - Unreleased
HADOOP-7349. HADOOP-7121 accidentally disabled some tests in TestIPC.
(todd)
HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd
via atm)
Release 0.21.1 - Unreleased
IMPROVEMENTS

View File

@ -16,13 +16,13 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-distro</artifactId>
<version>0.23.0-SNAPSHOT</version>
<relativePath>../hadoop-project-distro</relativePath>
<artifactId>hadoop-project-dist</artifactId>
<version>0.24.0-SNAPSHOT</version>
<relativePath>../../hadoop-project-dist</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>0.23.0-SNAPSHOT</version>
<version>0.24.0-SNAPSHOT</version>
<description>Apache Hadoop Common</description>
<name>Apache Hadoop Common</name>
<packaging>jar</packaging>
@ -237,6 +237,11 @@
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-alfredo</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>
<build>

View File

@ -21,11 +21,7 @@ bin=`which $0`
bin=`dirname ${bin}`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
function print_usage(){
echo "Usage: hadoop [--config confdir] COMMAND"

View File

@ -139,64 +139,19 @@ fi
# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_PREFIX/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
fi
if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
fi
if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
fi
if [ -d "$HADOOP_PREFIX/build/test/core/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/core/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
IFS=
# for releases, add core hadoop jar & webapps to CLASSPATH
if [ -d "$HADOOP_PREFIX/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
if [ -d "$HADOOP_PREFIX/share/hadoop/common/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/webapps
fi
if [ -d "$HADOOP_PREFIX/share/hadoop/common/lib" ]; then
for f in $HADOOP_PREFIX/share/hadoop/common/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/lib'/*'
fi
for f in $HADOOP_PREFIX/share/hadoop/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# for developers, add libs to CLASSPATH
for f in $HADOOP_PREFIX/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common" ]; then
for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
if [ -d "$HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs" ]; then
for f in $HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred" ]; then
for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
for f in $HADOOP_PREFIX/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common'/*'
# add user-specified CLASSPATH last
if [ "$HADOOP_CLASSPATH" != "" ]; then
@ -274,37 +229,20 @@ HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
# put hdfs in classpath if present
if [ "$HADOOP_HDFS_HOME" = "" ]; then
if [ -d "${HADOOP_PREFIX}/share/hadoop/hdfs" ]; then
HADOOP_HDFS_HOME=$HADOOP_PREFIX/share/hadoop/hdfs
#echo Found HDFS installed at $HADOOP_HDFS_HOME
HADOOP_HDFS_HOME=$HADOOP_PREFIX
fi
fi
if [ -d "${HADOOP_HDFS_HOME}" ]; then
if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
fi
if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_HDFS_HOME}/conf" ]; then
CLASSPATH=${CLASSPATH}:${HADOOP_HDFS_HOME}/conf
fi
for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# add libs to CLASSPATH
if [ -d "${HADOOP_HDFS_HOME}/lib" ]; then
for f in $HADOOP_HDFS_HOME/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
fi
if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs
fi
if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib'/*'
fi
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*'
# cygwin path translation
if $cygwin; then
HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`
@ -313,44 +251,16 @@ fi
# set mapred home if mapred is present
if [ "$HADOOP_MAPRED_HOME" = "" ]; then
if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
HADOOP_MAPRED_HOME=$HADOOP_PREFIX/share/hadoop/mapreduce
HADOOP_MAPRED_HOME=$HADOOP_PREFIX
fi
fi
if [ -d "${HADOOP_MAPRED_HOME}" ]; then
if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps
fi
if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
fi
if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_MAPRED_HOME}/conf" ]; then
CLASSPATH=${CLASSPATH}:${HADOOP_MAPRED_HOME}/conf
fi
for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-*.jar; do
CLASSPATH=${CLASSPATH}:$f
done
if [ -d "${HADOOP_MAPRED_HOME}/lib" ]; then
for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f
done
fi
if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
fi
if [ -d "$HADOOP_MAPRED_HOME/build/tools" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
fi
for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-tools-*.jar; do
TOOL_PATH=${TOOL_PATH}:$f;
done
for f in $HADOOP_MAPRED_HOME/build/hadoop-mapreduce-tools-*.jar; do
TOOL_PATH=${TOOL_PATH}:$f;
done
if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib'/*'
fi
# cygwin path translation

View File

@ -39,11 +39,7 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
# get arguments

View File

@ -29,10 +29,6 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"

View File

@ -50,40 +50,6 @@ fi
JAVA=$JAVA_HOME/bin/java
JAVA_HEAP_MAX=-Xmx1000m
# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_PREFIX/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
fi
if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
fi
if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
IFS=
# for releases, add core hadoop jar & webapps to CLASSPATH
if [ -d "$HADOOP_PREFIX/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
fi
for f in $HADOOP_PREFIX/hadoop-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# add libs to CLASSPATH
for f in $HADOOP_PREFIX/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
for f in $HADOOP_PREFIX/lib/jetty-ext/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# restore ordinary behaviour
unset IFS

View File

@ -38,11 +38,7 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
. "${HADOOP_CONF_DIR}/hadoop-env.sh"

View File

@ -23,11 +23,7 @@ echo "This script is Deprecated. Instead use start-dfs.sh and start-mapred.sh"
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
# start hdfs daemons if hdfs is present
if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then

View File

@ -23,11 +23,7 @@ echo "This script is Deprecated. Instead use stop-dfs.sh and stop-mapred.sh"
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi
. "$bin"/../libexec/hadoop-config.sh
# stop hdfs daemons if hdfs is present
if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then

Some files were not shown because too many files have changed in this diff.