Merge all changes from trunk to branch HDFS-2832
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-2832@1516230 13f79535-47bb-0310-9956-ffa450edef68
commit 075995a201

BUILDING.txt

@@ -7,7 +7,7 @@ Requirements:
 * JDK 1.6
 * Maven 3.0
 * Findbugs 1.3.9 (if running findbugs)
-* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS)
+* ProtocolBuffer 2.5.0
 * CMake 2.6 or newer (if compiling native code)
 * Internet connection for first build (to fetch all Maven and Hadoop dependencies)

@@ -99,6 +99,16 @@ level once; and then work from the submodule. Keep in mind that SNAPSHOTs
 time out after a while, using the Maven '-nsu' will stop Maven from trying
 to update SNAPSHOTs from external repos.

+----------------------------------------------------------------------------------
+Protocol Buffer compiler
+
+The version of Protocol Buffer compiler, protoc, must match the version of the
+protobuf JAR.
+
+If you have multiple versions of protoc in your system, you can set in your
+build shell the HADOOP_PROTOC_PATH environment variable to point to the one you
+want to use for the Hadoop build. If you don't define this environment variable,
+protoc is looked up in the PATH.
 ----------------------------------------------------------------------------------
 Importing projects to eclipse


hadoop-auth pom.xml

@@ -33,7 +33,6 @@

   <properties>
     <maven.build.timestamp.format>yyyyMMdd</maven.build.timestamp.format>
-    <kerberos.realm>LOCALHOST</kerberos.realm>
   </properties>

   <dependencies>
@@ -83,38 +82,15 @@
       <artifactId>slf4j-log4j12</artifactId>
       <scope>runtime</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>

   <build>
-    <testResources>
-      <testResource>
-        <directory>${basedir}/src/test/resources</directory>
-        <filtering>true</filtering>
-        <includes>
-          <include>krb5.conf</include>
-        </includes>
-      </testResource>
-    </testResources>
     <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>always</forkMode>
-          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-          <systemPropertyVariables>
-            <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
-            <kerberos.realm>${kerberos.realm}</kerberos.realm>
-          </systemPropertyVariables>
-          <excludes>
-            <exclude>**/${test.exclude}.java</exclude>
-            <exclude>${test.exclude.pattern}</exclude>
-            <exclude>**/TestKerberosAuth*.java</exclude>
-            <exclude>**/TestAltKerberosAuth*.java</exclude>
-            <exclude>**/Test*$*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
@@ -134,33 +110,6 @@
   </build>

   <profiles>
-    <profile>
-      <id>testKerberos</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-surefire-plugin</artifactId>
-            <configuration>
-              <forkMode>always</forkMode>
-              <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-              <systemPropertyVariables>
-                <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
-                <kerberos.realm>${kerberos.realm}</kerberos.realm>
-              </systemPropertyVariables>
-              <excludes>
-                <exclude>**/${test.exclude}.java</exclude>
-                <exclude>${test.exclude.pattern}</exclude>
-                <exclude>**/Test*$*.java</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <profile>
       <id>docs</id>
       <activation>

KerberosTestUtils.java

@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.security.authentication;

-
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -26,6 +25,7 @@ import java.io.File;
 import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
+import java.util.UUID;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -36,32 +36,23 @@ import java.util.concurrent.Callable;
  * Test helper class for Java Kerberos setup.
  */
 public class KerberosTestUtils {
-  private static final String PREFIX = "hadoop-auth.test.";
+  private static String keytabFile = new File(System.getProperty("test.dir", "target"),
+          UUID.randomUUID().toString()).toString();

-  public static final String REALM = PREFIX + "kerberos.realm";
-
-  public static final String CLIENT_PRINCIPAL = PREFIX + "kerberos.client.principal";
-
-  public static final String SERVER_PRINCIPAL = PREFIX + "kerberos.server.principal";
-
-  public static final String KEYTAB_FILE = PREFIX + "kerberos.keytab.file";
-
   public static String getRealm() {
-    return System.getProperty(REALM, "LOCALHOST");
+    return "EXAMPLE.COM";
   }

   public static String getClientPrincipal() {
-    return System.getProperty(CLIENT_PRINCIPAL, "client") + "@" + getRealm();
+    return "client@EXAMPLE.COM";
   }

   public static String getServerPrincipal() {
-    return System.getProperty(SERVER_PRINCIPAL, "HTTP/localhost") + "@" + getRealm();
+    return "HTTP/localhost@EXAMPLE.COM";
   }

   public static String getKeytabFile() {
-    String keytabFile =
-      new File(System.getProperty("user.home"), System.getProperty("user.name") + ".keytab").toString();
-    return System.getProperty(KEYTAB_FILE, keytabFile);
+    return keytabFile;
   }

   private static class KerberosConfiguration extends Configuration {
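
The net effect of this change is that the test realm, principals, and keytab path become fixed values backed by a MiniKDC rather than JVM system properties. A minimal sketch of how a test consumes the new helper, distilled from the TestKerberosAuthenticator changes later in this commit (the class name ExampleKerberosClientTest and the test body are illustrative assumptions, not part of the commit):

    import java.io.File;
    import java.util.concurrent.Callable;
    import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
    import org.apache.hadoop.security.authentication.KerberosTestUtils;
    import org.junit.Before;
    import org.junit.Test;

    public class ExampleKerberosClientTest extends KerberosSecurityTestcase {

      @Before
      public void createTestPrincipals() throws Exception {
        // Provision the client and HTTP/localhost principals into the per-run
        // keytab that KerberosTestUtils.getKeytabFile() now points at.
        File keytab = new File(KerberosTestUtils.getKeytabFile());
        getKdc().createPrincipal(keytab, "client", "HTTP/localhost");
      }

      @Test
      public void runsAsClient() throws Exception {
        // Run client-side code inside a login context for client@EXAMPLE.COM,
        // the realm now hard-coded by KerberosTestUtils.getRealm().
        KerberosTestUtils.doAsClient(new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            // client-side assertions would go here
            return null;
          }
        });
      }
    }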

AuthenticatorTestCase.java

@@ -2,9 +2,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *   http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -13,10 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.client;

-import junit.framework.Assert;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import junit.framework.TestCase;
-import org.mockito.Mockito;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.FilterHolder;
@@ -27,19 +24,20 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
 import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.ServerSocket;
 import java.net.URL;
 import java.util.Properties;
+import org.junit.Assert;

-public abstract class AuthenticatorTestCase extends TestCase {
+public class AuthenticatorTestCase {
   private Server server;
   private String host = null;
   private int port = -1;
@@ -151,18 +149,18 @@ public abstract class AuthenticatorTestCase extends TestCase {
         writer.write(POST);
         writer.close();
       }
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
       if (doPost) {
         BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
         String echo = reader.readLine();
-        assertEquals(POST, echo);
-        assertNull(reader.readLine());
+        Assert.assertEquals(POST, echo);
+        Assert.assertNull(reader.readLine());
       }
       aUrl = new AuthenticatedURL();
       conn = aUrl.openConnection(url, token);
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-      assertEquals(tokenStr, token.toString());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(tokenStr, token.toString());
     } finally {
       stop();
     }
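
Because AuthenticatorTestCase is no longer an abstract JUnit 3 TestCase, the test classes in this commit stop extending it and instead create an instance per test method. A hedged sketch of that composition pattern, mirroring the TestKerberosAuthenticator and TestPseudoAuthenticator changes below (the class name ExampleAuthenticatorTest is illustrative, not part of the commit):

    package org.apache.hadoop.security.authentication.client;

    import java.util.Properties;
    import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
    import org.junit.Test;

    public class ExampleAuthenticatorTest {

      @Test
      public void authenticatesWithSimpleHandler() throws Exception {
        // Compose the helper instead of inheriting from it.
        AuthenticatorTestCase auth = new AuthenticatorTestCase();
        Properties props = new Properties();
        props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
        auth.setAuthenticationHandlerConfig(props);
        // _testAuthentication() drives the embedded Jetty server itself; note
        // the finally { stop(); } visible in the hunk above.
        auth._testAuthentication(new PseudoAuthenticator(), false);
      }
    }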

TestAuthenticatedURL.java

@@ -13,8 +13,8 @@
  */
 package org.apache.hadoop.security.authentication.client;

-import junit.framework.Assert;
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;

 import java.net.HttpURLConnection;
@@ -24,46 +24,48 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-public class TestAuthenticatedURL extends TestCase {
+public class TestAuthenticatedURL {

+  @Test
   public void testToken() throws Exception {
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-    assertFalse(token.isSet());
+    Assert.assertFalse(token.isSet());
     token = new AuthenticatedURL.Token("foo");
-    assertTrue(token.isSet());
-    assertEquals("foo", token.toString());
+    Assert.assertTrue(token.isSet());
+    Assert.assertEquals("foo", token.toString());

     AuthenticatedURL.Token token1 = new AuthenticatedURL.Token();
     AuthenticatedURL.Token token2 = new AuthenticatedURL.Token();
-    assertEquals(token1.hashCode(), token2.hashCode());
-    assertTrue(token1.equals(token2));
+    Assert.assertEquals(token1.hashCode(), token2.hashCode());
+    Assert.assertTrue(token1.equals(token2));

     token1 = new AuthenticatedURL.Token();
     token2 = new AuthenticatedURL.Token("foo");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));

     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token();
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));

     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token("foo");
-    assertEquals(token1.hashCode(), token2.hashCode());
-    assertTrue(token1.equals(token2));
+    Assert.assertEquals(token1.hashCode(), token2.hashCode());
+    Assert.assertTrue(token1.equals(token2));

     token1 = new AuthenticatedURL.Token("bar");
     token2 = new AuthenticatedURL.Token("foo");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));

     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token("bar");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));
   }

+  @Test
   public void testInjectToken() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
@@ -72,6 +74,7 @@ public class TestAuthenticatedURL extends TestCase {
     Mockito.verify(conn).addRequestProperty(Mockito.eq("Cookie"), Mockito.anyString());
   }

+  @Test
   public void testExtractTokenOK() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);

@@ -87,9 +90,10 @@ public class TestAuthenticatedURL extends TestCase {
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
     AuthenticatedURL.extractToken(conn, token);

-    assertEquals(tokenStr, token.toString());
+    Assert.assertEquals(tokenStr, token.toString());
   }

+  @Test
   public void testExtractTokenFail() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);

@@ -106,15 +110,16 @@ public class TestAuthenticatedURL extends TestCase {
     token.set("bar");
     try {
       AuthenticatedURL.extractToken(conn, token);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
       Assert.assertFalse(token.isSet());
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
   }

+  @Test
   public void testConnectionConfigurator() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
     Mockito.when(conn.getResponseCode()).
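
The conversion applied to TestAuthenticatedURL above is the same mechanical JUnit 3 to JUnit 4 migration used for every test class in this commit. A minimal before-and-after sketch of the pattern (class and method names are illustrative):

    // Before (JUnit 3): assertions are inherited from junit.framework.TestCase.
    //
    //   public class TestExample extends junit.framework.TestCase {
    //     public void testSomething() {
    //       assertTrue("value".length() > 0);
    //     }
    //   }

    // After (JUnit 4): no base class, @Test annotations, assertions qualified
    // with org.junit.Assert.
    import org.junit.Assert;
    import org.junit.Test;

    public class TestExample {
      @Test
      public void testSomething() {
        Assert.assertTrue("value".length() > 0);
      }
    }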

TestKerberosAuthenticator.java

@@ -13,17 +13,33 @@
  */
 package org.apache.hadoop.security.authentication.client;

+import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Properties;
 import java.util.concurrent.Callable;

-public class TestKerberosAuthenticator extends AuthenticatorTestCase {
+public class TestKerberosAuthenticator extends KerberosSecurityTestcase {

+  @Before
+  public void setup() throws Exception {
+    // create keytab
+    File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+    String clientPrincipal = KerberosTestUtils.getClientPrincipal();
+    String serverPrincipal = KerberosTestUtils.getServerPrincipal();
+    clientPrincipal = clientPrincipal.substring(0, clientPrincipal.lastIndexOf("@"));
+    serverPrincipal = serverPrincipal.substring(0, serverPrincipal.lastIndexOf("@"));
+    getKdc().createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
+  }
+
   private Properties getAuthenticationHandlerConfiguration() {
     Properties props = new Properties();
@@ -35,57 +51,67 @@ public class TestKerberosAuthenticator extends AuthenticatorTestCase {
     return props;
   }

+  @Test(timeout=60000)
   public void testFallbacktoPseudoAuthenticator() throws Exception {
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
     Properties props = new Properties();
     props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
     props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
-    setAuthenticationHandlerConfig(props);
-    _testAuthentication(new KerberosAuthenticator(), false);
+    auth.setAuthenticationHandlerConfig(props);
+    auth._testAuthentication(new KerberosAuthenticator(), false);
   }

+  @Test(timeout=60000)
   public void testFallbacktoPseudoAuthenticatorAnonymous() throws Exception {
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
     Properties props = new Properties();
     props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
     props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
-    setAuthenticationHandlerConfig(props);
-    _testAuthentication(new KerberosAuthenticator(), false);
+    auth.setAuthenticationHandlerConfig(props);
+    auth._testAuthentication(new KerberosAuthenticator(), false);
   }

+  @Test(timeout=60000)
   public void testNotAuthenticated() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
-      assertTrue(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE) != null);
+      Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+      Assert.assertTrue(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE) != null);
     } finally {
-      stop();
+      auth.stop();
     }
   }

+  @Test(timeout=60000)
   public void testAuthentication() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    final AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration());
     KerberosTestUtils.doAsClient(new Callable<Void>() {
       @Override
       public Void call() throws Exception {
-        _testAuthentication(new KerberosAuthenticator(), false);
+        auth._testAuthentication(new KerberosAuthenticator(), false);
         return null;
       }
     });
   }

+  @Test(timeout=60000)
   public void testAuthenticationPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    final AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration());
     KerberosTestUtils.doAsClient(new Callable<Void>() {
       @Override
       public Void call() throws Exception {
-        _testAuthentication(new KerberosAuthenticator(), true);
+        auth._testAuthentication(new KerberosAuthenticator(), true);
         return null;
       }
     });
   }

 }

TestPseudoAuthenticator.java

@@ -15,12 +15,14 @@ package org.apache.hadoop.security.authentication.client;

 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.junit.Assert;
+import org.junit.Test;

 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Properties;

-public class TestPseudoAuthenticator extends AuthenticatorTestCase {
+public class TestPseudoAuthenticator {

   private Properties getAuthenticationHandlerConfiguration(boolean anonymousAllowed) {
     Properties props = new Properties();
@@ -29,55 +31,74 @@ public class TestPseudoAuthenticator extends AuthenticatorTestCase {
     return props;
   }

+  @Test
   public void testGetUserName() throws Exception {
     PseudoAuthenticator authenticator = new PseudoAuthenticator();
-    assertEquals(System.getProperty("user.name"), authenticator.getUserName());
+    Assert.assertEquals(System.getProperty("user.name"), authenticator.getUserName());
   }

+  @Test
   public void testAnonymousAllowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(true));
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
     } finally {
-      stop();
+      auth.stop();
     }
   }

+  @Test
   public void testAnonymousDisallowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(false));
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
     } finally {
-      stop();
+      auth.stop();
     }
   }

+  @Test
   public void testAuthenticationAnonymousAllowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    _testAuthentication(new PseudoAuthenticator(), false);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(true));
+    auth._testAuthentication(new PseudoAuthenticator(), false);
   }

+  @Test
   public void testAuthenticationAnonymousDisallowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    _testAuthentication(new PseudoAuthenticator(), false);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(false));
+    auth._testAuthentication(new PseudoAuthenticator(), false);
   }

+  @Test
   public void testAuthenticationAnonymousAllowedWithPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    _testAuthentication(new PseudoAuthenticator(), true);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(true));
+    auth._testAuthentication(new PseudoAuthenticator(), true);
   }

+  @Test
   public void testAuthenticationAnonymousDisallowedWithPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    _testAuthentication(new PseudoAuthenticator(), true);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+        getAuthenticationHandlerConfiguration(false));
+    auth._testAuthentication(new PseudoAuthenticator(), true);
   }

 }

TestAltKerberosAuthenticationHandler.java

@@ -18,6 +18,8 @@ import java.util.Properties;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;

 public class TestAltKerberosAuthenticationHandler
@@ -45,6 +47,7 @@ public class TestAltKerberosAuthenticationHandler
     return AltKerberosAuthenticationHandler.TYPE;
   }

+  @Test(timeout=60000)
   public void testAlternateAuthenticationAsBrowser() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -54,11 +57,12 @@ public class TestAltKerberosAuthenticationHandler
     Mockito.when(request.getHeader("User-Agent")).thenReturn("Some Browser");

     AuthenticationToken token = handler.authenticate(request, response);
-    assertEquals("A", token.getUserName());
-    assertEquals("B", token.getName());
-    assertEquals(getExpectedType(), token.getType());
+    Assert.assertEquals("A", token.getUserName());
+    Assert.assertEquals("B", token.getName());
+    Assert.assertEquals(getExpectedType(), token.getType());
   }

+  @Test(timeout=60000)
   public void testNonDefaultNonBrowserUserAgentAsBrowser() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -81,11 +85,12 @@ public class TestAltKerberosAuthenticationHandler
     Mockito.when(request.getHeader("User-Agent")).thenReturn("blah");
     // Should use alt authentication
     AuthenticationToken token = handler.authenticate(request, response);
-    assertEquals("A", token.getUserName());
-    assertEquals("B", token.getName());
-    assertEquals(getExpectedType(), token.getType());
+    Assert.assertEquals("A", token.getUserName());
+    Assert.assertEquals("B", token.getName());
+    Assert.assertEquals(getExpectedType(), token.getType());
   }

+  @Test(timeout=60000)
   public void testNonDefaultNonBrowserUserAgentAsNonBrowser() throws Exception {
     if (handler != null) {
       handler.destroy();
@ -16,7 +16,8 @@ package org.apache.hadoop.security.authentication.server;
|
||||||
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
|
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
|
||||||
import org.apache.hadoop.security.authentication.client.AuthenticationException;
|
import org.apache.hadoop.security.authentication.client.AuthenticationException;
|
||||||
import org.apache.hadoop.security.authentication.util.Signer;
|
import org.apache.hadoop.security.authentication.util.Signer;
|
||||||
import junit.framework.TestCase;
|
import org.junit.Assert;
|
||||||
|
import org.junit.Test;
|
||||||
import org.mockito.Mockito;
|
import org.mockito.Mockito;
|
||||||
import org.mockito.invocation.InvocationOnMock;
|
import org.mockito.invocation.InvocationOnMock;
|
||||||
import org.mockito.stubbing.Answer;
|
import org.mockito.stubbing.Answer;
|
||||||
|
@ -34,8 +35,9 @@ import java.util.Arrays;
|
||||||
import java.util.Properties;
|
import java.util.Properties;
|
||||||
import java.util.Vector;
|
import java.util.Vector;
|
||||||
|
|
||||||
public class TestAuthenticationFilter extends TestCase {
|
public class TestAuthenticationFilter {
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testGetConfiguration() throws Exception {
|
public void testGetConfiguration() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
FilterConfig config = Mockito.mock(FilterConfig.class);
|
FilterConfig config = Mockito.mock(FilterConfig.class);
|
||||||
|
@ -43,27 +45,28 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
Mockito.when(config.getInitParameter("a")).thenReturn("A");
|
Mockito.when(config.getInitParameter("a")).thenReturn("A");
|
||||||
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
|
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
|
||||||
Properties props = filter.getConfiguration("", config);
|
Properties props = filter.getConfiguration("", config);
|
||||||
assertEquals("A", props.getProperty("a"));
|
Assert.assertEquals("A", props.getProperty("a"));
|
||||||
|
|
||||||
config = Mockito.mock(FilterConfig.class);
|
config = Mockito.mock(FilterConfig.class);
|
||||||
Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
|
Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
|
||||||
Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
|
Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
|
||||||
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
|
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
|
||||||
props = filter.getConfiguration("foo.", config);
|
props = filter.getConfiguration("foo.", config);
|
||||||
assertEquals("A", props.getProperty("a"));
|
Assert.assertEquals("A", props.getProperty("a"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testInitEmpty() throws Exception {
|
public void testInitEmpty() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
FilterConfig config = Mockito.mock(FilterConfig.class);
|
FilterConfig config = Mockito.mock(FilterConfig.class);
|
||||||
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
|
Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
|
||||||
filter.init(config);
|
filter.init(config);
|
||||||
fail();
|
Assert.fail();
|
||||||
} catch (ServletException ex) {
|
} catch (ServletException ex) {
|
||||||
// Expected
|
// Expected
|
||||||
} catch (Exception ex) {
|
} catch (Exception ex) {
|
||||||
fail();
|
Assert.fail();
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
|
@ -126,6 +129,7 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testInit() throws Exception {
|
public void testInit() throws Exception {
|
||||||
|
|
||||||
// minimal configuration & simple auth handler (Pseudo)
|
// minimal configuration & simple auth handler (Pseudo)
|
||||||
|
@ -138,11 +142,11 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
||||||
AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
|
AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
|
||||||
filter.init(config);
|
filter.init(config);
|
||||||
assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
|
Assert.assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
|
||||||
assertTrue(filter.isRandomSecret());
|
Assert.assertTrue(filter.isRandomSecret());
|
||||||
assertNull(filter.getCookieDomain());
|
Assert.assertNull(filter.getCookieDomain());
|
||||||
assertNull(filter.getCookiePath());
|
Assert.assertNull(filter.getCookiePath());
|
||||||
assertEquals(1000, filter.getValidity());
|
Assert.assertEquals(1000, filter.getValidity());
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
|
@ -157,7 +161,7 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
||||||
AuthenticationFilter.SIGNATURE_SECRET)).elements());
|
AuthenticationFilter.SIGNATURE_SECRET)).elements());
|
||||||
filter.init(config);
|
filter.init(config);
|
||||||
assertFalse(filter.isRandomSecret());
|
Assert.assertFalse(filter.isRandomSecret());
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
|
@ -174,13 +178,12 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
AuthenticationFilter.COOKIE_DOMAIN,
|
AuthenticationFilter.COOKIE_DOMAIN,
|
||||||
AuthenticationFilter.COOKIE_PATH)).elements());
|
AuthenticationFilter.COOKIE_PATH)).elements());
|
||||||
filter.init(config);
|
filter.init(config);
|
||||||
assertEquals(".foo.com", filter.getCookieDomain());
|
Assert.assertEquals(".foo.com", filter.getCookieDomain());
|
||||||
assertEquals("/bar", filter.getCookiePath());
|
Assert.assertEquals("/bar", filter.getCookiePath());
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// authentication handler lifecycle, and custom impl
|
// authentication handler lifecycle, and custom impl
|
||||||
DummyAuthenticationHandler.reset();
|
DummyAuthenticationHandler.reset();
|
||||||
filter = new AuthenticationFilter();
|
filter = new AuthenticationFilter();
|
||||||
|
@ -195,10 +198,10 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
Arrays.asList(AuthenticationFilter.AUTH_TYPE,
|
||||||
"management.operation.return")).elements());
|
"management.operation.return")).elements());
|
||||||
filter.init(config);
|
filter.init(config);
|
||||||
assertTrue(DummyAuthenticationHandler.init);
|
Assert.assertTrue(DummyAuthenticationHandler.init);
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
assertTrue(DummyAuthenticationHandler.destroy);
|
Assert.assertTrue(DummyAuthenticationHandler.destroy);
|
||||||
}
|
}
|
||||||
|
|
||||||
// kerberos auth handler
|
// kerberos auth handler
|
||||||
|
@ -212,11 +215,12 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
} catch (ServletException ex) {
|
} catch (ServletException ex) {
|
||||||
// Expected
|
// Expected
|
||||||
} finally {
|
} finally {
|
||||||
assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
|
Assert.assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testGetRequestURL() throws Exception {
|
public void testGetRequestURL() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
|
@ -235,12 +239,13 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
|
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
|
||||||
Mockito.when(request.getQueryString()).thenReturn("a=A&b=B");
|
Mockito.when(request.getQueryString()).thenReturn("a=A&b=B");
|
||||||
|
|
||||||
assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
|
Assert.assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testGetToken() throws Exception {
|
public void testGetToken() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
|
@ -268,12 +273,13 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
|
|
||||||
AuthenticationToken newToken = filter.getToken(request);
|
AuthenticationToken newToken = filter.getToken(request);
|
||||||
|
|
||||||
assertEquals(token.toString(), newToken.toString());
|
Assert.assertEquals(token.toString(), newToken.toString());
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testGetTokenExpired() throws Exception {
|
public void testGetTokenExpired() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
|
@ -300,17 +306,18 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
filter.getToken(request);
|
filter.getToken(request);
|
||||||
fail();
|
Assert.fail();
|
||||||
} catch (AuthenticationException ex) {
|
} catch (AuthenticationException ex) {
|
||||||
// Expected
|
// Expected
|
||||||
} catch (Exception ex) {
|
} catch (Exception ex) {
|
||||||
fail();
|
Assert.fail();
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testGetTokenInvalidType() throws Exception {
|
public void testGetTokenInvalidType() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
|
@ -338,17 +345,18 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
filter.getToken(request);
|
filter.getToken(request);
|
||||||
fail();
|
Assert.fail();
|
||||||
} catch (AuthenticationException ex) {
|
} catch (AuthenticationException ex) {
|
||||||
// Expected
|
// Expected
|
||||||
} catch (Exception ex) {
|
} catch (Exception ex) {
|
||||||
fail();
|
Assert.fail();
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
filter.destroy();
|
filter.destroy();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testDoFilterNotAuthenticated() throws Exception {
|
public void testDoFilterNotAuthenticated() throws Exception {
|
||||||
AuthenticationFilter filter = new AuthenticationFilter();
|
AuthenticationFilter filter = new AuthenticationFilter();
|
||||||
try {
|
try {
|
||||||
|
@ -374,7 +382,7 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
new Answer<Object>() {
|
new Answer<Object>() {
|
||||||
@Override
|
@Override
|
||||||
public Object answer(InvocationOnMock invocation) throws Throwable {
|
public Object answer(InvocationOnMock invocation) throws Throwable {
|
||||||
fail();
|
Assert.fail();
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -468,27 +476,27 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
Mockito.verify(response, Mockito.never()).
|
Mockito.verify(response, Mockito.never()).
|
||||||
addCookie(Mockito.any(Cookie.class));
|
addCookie(Mockito.any(Cookie.class));
|
||||||
} else {
|
} else {
|
||||||
assertNotNull(setCookie[0]);
|
Assert.assertNotNull(setCookie[0]);
|
||||||
assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
|
Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
|
||||||
assertTrue(setCookie[0].getValue().contains("u="));
|
Assert.assertTrue(setCookie[0].getValue().contains("u="));
|
||||||
assertTrue(setCookie[0].getValue().contains("p="));
|
Assert.assertTrue(setCookie[0].getValue().contains("p="));
|
||||||
assertTrue(setCookie[0].getValue().contains("t="));
|
Assert.assertTrue(setCookie[0].getValue().contains("t="));
|
||||||
assertTrue(setCookie[0].getValue().contains("e="));
|
Assert.assertTrue(setCookie[0].getValue().contains("e="));
|
||||||
assertTrue(setCookie[0].getValue().contains("s="));
|
Assert.assertTrue(setCookie[0].getValue().contains("s="));
|
||||||
assertTrue(calledDoFilter[0]);
|
Assert.assertTrue(calledDoFilter[0]);
|
||||||
|
|
||||||
Signer signer = new Signer("secret".getBytes());
|
Signer signer = new Signer("secret".getBytes());
|
||||||
String value = signer.verifyAndExtract(setCookie[0].getValue());
|
String value = signer.verifyAndExtract(setCookie[0].getValue());
|
||||||
AuthenticationToken token = AuthenticationToken.parse(value);
|
AuthenticationToken token = AuthenticationToken.parse(value);
|
||||||
assertEquals(System.currentTimeMillis() + 1000 * 1000,
|
Assert.assertEquals(System.currentTimeMillis() + 1000 * 1000,
|
||||||
token.getExpires(), 100);
|
token.getExpires(), 100);
|
||||||
|
|
||||||
if (withDomainPath) {
|
if (withDomainPath) {
|
||||||
assertEquals(".foo.com", setCookie[0].getDomain());
|
Assert.assertEquals(".foo.com", setCookie[0].getDomain());
|
||||||
assertEquals("/bar", setCookie[0].getPath());
|
Assert.assertEquals("/bar", setCookie[0].getPath());
|
||||||
} else {
|
} else {
|
||||||
assertNull(setCookie[0].getDomain());
|
Assert.assertNull(setCookie[0].getDomain());
|
||||||
assertNull(setCookie[0].getPath());
|
Assert.assertNull(setCookie[0].getPath());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
|
@ -496,22 +504,27 @@ public class TestAuthenticationFilter extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testDoFilterAuthentication() throws Exception {
|
public void testDoFilterAuthentication() throws Exception {
|
||||||
_testDoFilterAuthentication(false, false, false);
|
_testDoFilterAuthentication(false, false, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testDoFilterAuthenticationImmediateExpiration() throws Exception {
|
public void testDoFilterAuthenticationImmediateExpiration() throws Exception {
|
||||||
_testDoFilterAuthentication(false, false, true);
|
_testDoFilterAuthentication(false, false, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
|
public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
|
||||||
_testDoFilterAuthentication(false, true, false);
|
_testDoFilterAuthentication(false, true, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testDoFilterAuthenticationWithDomainPath() throws Exception {
|
public void testDoFilterAuthenticationWithDomainPath() throws Exception {
|
||||||
_testDoFilterAuthentication(true, false, false);
|
_testDoFilterAuthentication(true, false, false);
|
||||||
}
|
}
|
+  @Test
   public void testDoFilterAuthenticated() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
@@ -547,8 +560,8 @@ public class TestAuthenticationFilter extends TestCase {
           public Object answer(InvocationOnMock invocation) throws Throwable {
             Object[] args = invocation.getArguments();
             HttpServletRequest request = (HttpServletRequest) args[0];
-            assertEquals("u", request.getRemoteUser());
-            assertEquals("p", request.getUserPrincipal().getName());
+            Assert.assertEquals("u", request.getRemoteUser());
+            Assert.assertEquals("p", request.getUserPrincipal().getName());
             return null;
           }
         }
@@ -561,6 +574,7 @@ public class TestAuthenticationFilter extends TestCase {
     }
   }

+  @Test
   public void testDoFilterAuthenticatedExpired() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
@@ -594,7 +608,7 @@ public class TestAuthenticationFilter extends TestCase {
         new Answer<Object>() {
           @Override
           public Object answer(InvocationOnMock invocation) throws Throwable {
-            fail();
+            Assert.fail();
             return null;
           }
         }
@@ -616,15 +630,15 @@ public class TestAuthenticationFilter extends TestCase {

     Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());

-    assertNotNull(setCookie[0]);
-    assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
-    assertEquals("", setCookie[0].getValue());
+    Assert.assertNotNull(setCookie[0]);
+    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+    Assert.assertEquals("", setCookie[0].getValue());
     } finally {
       filter.destroy();
     }
   }

+  @Test
   public void testDoFilterAuthenticatedInvalidType() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
@@ -658,7 +672,7 @@ public class TestAuthenticationFilter extends TestCase {
         new Answer<Object>() {
           @Override
           public Object answer(InvocationOnMock invocation) throws Throwable {
-            fail();
+            Assert.fail();
             return null;
           }
         }
@@ -680,14 +694,15 @@ public class TestAuthenticationFilter extends TestCase {

     Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());

-    assertNotNull(setCookie[0]);
-    assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
-    assertEquals("", setCookie[0].getValue());
+    Assert.assertNotNull(setCookie[0]);
+    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+    Assert.assertEquals("", setCookie[0].getValue());
     } finally {
       filter.destroy();
     }
   }

+  @Test
   public void testManagementOperation() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
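The hunks above keep the existing Mockito Answer callbacks and only switch the assertions to the explicit org.junit.Assert form. As a standalone illustration of that callback pattern, here is a minimal sketch that is not part of this commit; the class and helper names are invented for the example, and only the Mockito/JUnit calls shown in the diff are assumed.

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import javax.servlet.FilterChain;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;

import org.junit.Assert;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class FilterChainAnswerSketch {

  // Stub a FilterChain whose doFilter() asserts on the (wrapped) request it receives,
  // the same trick the authentication-filter tests use to verify getRemoteUser().
  static FilterChain chainExpectingUser(final String user) throws Exception {
    FilterChain chain = mock(FilterChain.class);
    doAnswer(new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) throws Throwable {
        HttpServletRequest request =
            (HttpServletRequest) invocation.getArguments()[0];
        Assert.assertEquals(user, request.getRemoteUser());
        return null;
      }
    }).when(chain).doFilter(any(ServletRequest.class), any(ServletResponse.class));
    return chain;
  }
}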
@@ -14,98 +14,104 @@
 package org.apache.hadoop.security.authentication.server;

 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;

-public class TestAuthenticationToken extends TestCase {
+public class TestAuthenticationToken {

+  @Test
   public void testAnonymous() {
-    assertNotNull(AuthenticationToken.ANONYMOUS);
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getUserName());
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getName());
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getType());
-    assertEquals(-1, AuthenticationToken.ANONYMOUS.getExpires());
-    assertFalse(AuthenticationToken.ANONYMOUS.isExpired());
+    Assert.assertNotNull(AuthenticationToken.ANONYMOUS);
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getUserName());
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getName());
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getType());
+    Assert.assertEquals(-1, AuthenticationToken.ANONYMOUS.getExpires());
+    Assert.assertFalse(AuthenticationToken.ANONYMOUS.isExpired());
   }

+  @Test
   public void testConstructor() throws Exception {
     try {
       new AuthenticationToken(null, "p", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       new AuthenticationToken("", "p", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       new AuthenticationToken("u", null, "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       new AuthenticationToken("u", "", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       new AuthenticationToken("u", "p", null);
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       new AuthenticationToken("u", "p", "");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     new AuthenticationToken("u", "p", "t");
   }

+  @Test
   public void testGetters() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     token.setExpires(expires);
-    assertEquals("u", token.getUserName());
-    assertEquals("p", token.getName());
-    assertEquals("t", token.getType());
-    assertEquals(expires, token.getExpires());
-    assertFalse(token.isExpired());
+    Assert.assertEquals("u", token.getUserName());
+    Assert.assertEquals("p", token.getName());
+    Assert.assertEquals("t", token.getType());
+    Assert.assertEquals(expires, token.getExpires());
+    Assert.assertFalse(token.isExpired());
     Thread.sleep(70); // +20 msec fuzz for timer granularity.
-    assertTrue(token.isExpired());
+    Assert.assertTrue(token.isExpired());
   }

+  @Test
   public void testToStringAndParse() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     token.setExpires(expires);
     String str = token.toString();
     token = AuthenticationToken.parse(str);
-    assertEquals("p", token.getName());
-    assertEquals("t", token.getType());
-    assertEquals(expires, token.getExpires());
-    assertFalse(token.isExpired());
+    Assert.assertEquals("p", token.getName());
+    Assert.assertEquals("t", token.getType());
+    Assert.assertEquals(expires, token.getExpires());
+    Assert.assertFalse(token.isExpired());
     Thread.sleep(70); // +20 msec fuzz for timer granularity.
-    assertTrue(token.isExpired());
+    Assert.assertTrue(token.isExpired());
   }

+  @Test
   public void testParseInvalid() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
@@ -114,11 +120,11 @@ public class TestAuthenticationToken extends TestCase {
     str = str.substring(0, str.indexOf("e="));
     try {
       AuthenticationToken.parse(str);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
   }
 }
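This file drops junit.framework.TestCase entirely: no base class, @Test on every method, and assertions through org.junit.Assert. A minimal sketch of the same migration pattern on an invented example (not from this commit; the payload string is illustrative):

import org.junit.Assert;
import org.junit.Test;

public class JUnit4MigrationSketch {

  // JUnit 3: `class X extends TestCase`, test* naming, inherited assertEquals(...).
  // JUnit 4: plain class, @Test on each method, assertions via org.junit.Assert.
  @Test
  public void testRoundTrip() {
    String payload = "u!p!t";                 // illustrative value only
    Assert.assertEquals("u!p!t", payload);
    Assert.assertFalse(payload.isEmpty());
  }
}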
@@ -13,25 +13,31 @@
  */
 package org.apache.hadoop.security.authentication.server;

+import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
-import junit.framework.TestCase;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.ietf.jgss.Oid;

 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import java.io.File;
 import java.util.Properties;
 import java.util.concurrent.Callable;

-public class TestKerberosAuthenticationHandler extends TestCase {
+public class TestKerberosAuthenticationHandler
+    extends KerberosSecurityTestcase {

   protected KerberosAuthenticationHandler handler;

@@ -54,9 +60,16 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     return props;
   }

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setup() throws Exception {
+    // create keytab
+    File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+    String clientPrincipal = KerberosTestUtils.getClientPrincipal();
+    String serverPrincipal = KerberosTestUtils.getServerPrincipal();
+    clientPrincipal = clientPrincipal.substring(0, clientPrincipal.lastIndexOf("@"));
+    serverPrincipal = serverPrincipal.substring(0, serverPrincipal.lastIndexOf("@"));
+    getKdc().createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
+    // handler
     handler = getNewAuthenticationHandler();
     Properties props = getDefaultProperties();
     try {
@@ -67,18 +80,10 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     }
   }

-  @Override
-  protected void tearDown() throws Exception {
-    if (handler != null) {
-      handler.destroy();
-      handler = null;
-    }
-    super.tearDown();
-  }
-
+  @Test(timeout=60000)
   public void testNameRules() throws Exception {
     KerberosName kn = new KerberosName(KerberosTestUtils.getServerPrincipal());
-    assertEquals(KerberosTestUtils.getRealm(), kn.getRealm());
+    Assert.assertEquals(KerberosTestUtils.getRealm(), kn.getRealm());

     //destroy handler created in setUp()
     handler.destroy();
@@ -93,30 +98,32 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     } catch (Exception ex) {
     }
     kn = new KerberosName("bar@BAR");
-    assertEquals("bar", kn.getShortName());
+    Assert.assertEquals("bar", kn.getShortName());
     kn = new KerberosName("bar@FOO");
     try {
       kn.getShortName();
-      fail();
+      Assert.fail();
     }
     catch (Exception ex) {
     }
   }

+  @Test(timeout=60000)
   public void testInit() throws Exception {
-    assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
-    assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
+    Assert.assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
+    Assert.assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
   }

+  @Test(timeout=60000)
   public void testType() throws Exception {
-    assertEquals(getExpectedType(), handler.getType());
+    Assert.assertEquals(getExpectedType(), handler.getType());
   }

   public void testRequestWithoutAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

-    assertNull(handler.authenticate(request, response));
+    Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
   }
@@ -126,11 +133,12 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

     Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION)).thenReturn("invalid");
-    assertNull(handler.authenticate(request, response));
+    Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
   }

+  @Test(timeout=60000)
   public void testRequestWithIncompleteAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -139,15 +147,14 @@ public class TestKerberosAuthenticationHandler extends TestCase {
         .thenReturn(KerberosAuthenticator.NEGOTIATE);
     try {
       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
   }

-
   public void testRequestWithAuthorization() throws Exception {
     String token = KerberosTestUtils.doAsClient(new Callable<String>() {
       @Override
@@ -191,9 +198,9 @@ public class TestKerberosAuthenticationHandler extends TestCase {
           Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
       Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);

-      assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
-      assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
-      assertEquals(getExpectedType(), authToken.getType());
+      Assert.assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
+      Assert.assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
+      Assert.assertEquals(getExpectedType(), authToken.getType());
     } else {
       Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
           Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
@@ -213,12 +220,19 @@ public class TestKerberosAuthenticationHandler extends TestCase {

     try {
       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
   }

+  @After
+  public void tearDown() throws Exception {
+    if (handler != null) {
+      handler.destroy();
+      handler = null;
+    }
+  }
 }
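The new setup above replaces the external KDC and the filtered krb5.conf template with hadoop-minikdc: the test extends KerberosSecurityTestcase and creates its principals against the embedded KDC. A hedged sketch of that usage follows; getKdc() and createPrincipal() appear in the diff above, while getWorkDir(), getPort(), the principal names, and the keytab file name are assumptions made for the example.

import java.io.File;

import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class MiniKdcUsageSketch extends KerberosSecurityTestcase {

  private File keytab;

  @Before
  public void createTestPrincipals() throws Exception {
    // The base class starts an embedded KDC before each test runs.
    keytab = new File(getWorkDir(), "sketch.keytab");          // assumed helper/location
    getKdc().createPrincipal(keytab, "client", "HTTP/localhost");
  }

  @Test(timeout = 60000)
  public void kdcIsUp() {
    Assert.assertTrue(keytab.exists());
    Assert.assertTrue(getKdc().getPort() > 0);                 // assumed accessor
  }
}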
@@ -14,33 +14,37 @@
 package org.apache.hadoop.security.authentication.server;

 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import junit.framework.TestCase;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;

 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.util.Properties;

-public class TestPseudoAuthenticationHandler extends TestCase {
+public class TestPseudoAuthenticationHandler {

+  @Test
   public void testInit() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
       Properties props = new Properties();
       props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
       handler.init(props);
-      assertEquals(false, handler.getAcceptAnonymous());
+      Assert.assertEquals(false, handler.getAcceptAnonymous());
     } finally {
       handler.destroy();
     }
   }

+  @Test
   public void testType() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
-    assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
+    Assert.assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
   }

+  @Test
   public void testAnonymousOn() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
@@ -53,12 +57,13 @@ public class TestPseudoAuthenticationHandler extends TestCase {

       AuthenticationToken token = handler.authenticate(request, response);

-      assertEquals(AuthenticationToken.ANONYMOUS, token);
+      Assert.assertEquals(AuthenticationToken.ANONYMOUS, token);
     } finally {
       handler.destroy();
     }
   }

+  @Test
   public void testAnonymousOff() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
@@ -70,11 +75,11 @@ public class TestPseudoAuthenticationHandler extends TestCase {
       HttpServletResponse response = Mockito.mock(HttpServletResponse.class);

       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     } finally {
       handler.destroy();
     }
@@ -93,19 +98,21 @@ public class TestPseudoAuthenticationHandler extends TestCase {

       AuthenticationToken token = handler.authenticate(request, response);

-      assertNotNull(token);
-      assertEquals("user", token.getUserName());
-      assertEquals("user", token.getName());
-      assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
+      Assert.assertNotNull(token);
+      Assert.assertEquals("user", token.getUserName());
+      Assert.assertEquals("user", token.getName());
+      Assert.assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
     } finally {
       handler.destroy();
     }
   }

+  @Test
   public void testUserNameAnonymousOff() throws Exception {
     _testUserName(false);
   }

+  @Test
   public void testUserNameAnonymousOn() throws Exception {
     _testUserName(true);
   }
@@ -21,14 +21,19 @@ package org.apache.hadoop.security.authentication.util;
 import java.io.IOException;

 import org.apache.hadoop.security.authentication.KerberosTestUtils;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
+
+import org.junit.Assert;

 public class TestKerberosName {

   @Before
   public void setUp() throws Exception {
+    System.setProperty("java.security.krb5.realm", KerberosTestUtils.getRealm());
+    System.setProperty("java.security.krb5.kdc", "localhost:88");
+
     String rules =
       "RULE:[1:$1@$0](.*@YAHOO\\.COM)s/@.*//\n" +
       "RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
@@ -44,7 +49,7 @@ public class TestKerberosName {
     KerberosName nm = new KerberosName(from);
     String simple = nm.getShortName();
     System.out.println("to " + simple);
-    assertEquals("short name incorrect", to, simple);
+    Assert.assertEquals("short name incorrect", to, simple);
   }

   @Test
@@ -61,7 +66,7 @@ public class TestKerberosName {
     System.out.println("Checking " + name + " to ensure it is bad.");
     try {
       new KerberosName(name);
-      fail("didn't get exception for " + name);
+      Assert.fail("didn't get exception for " + name);
     } catch (IllegalArgumentException iae) {
       // PASS
     }
@@ -72,7 +77,7 @@ public class TestKerberosName {
     KerberosName nm = new KerberosName(from);
     try {
       nm.getShortName();
-      fail("didn't get exception for " + from);
+      Assert.fail("didn't get exception for " + from);
     } catch (IOException ie) {
       // PASS
     }
@@ -85,4 +90,10 @@ public class TestKerberosName {
     checkBadTranslation("foo@ACME.COM");
     checkBadTranslation("root/joe@FOO.COM");
   }
+
+  @After
+  public void clear() {
+    System.clearProperty("java.security.krb5.realm");
+    System.clearProperty("java.security.krb5.kdc");
+  }
 }
@@ -16,11 +16,10 @@
  */
 package org.apache.hadoop.security.authentication.util;

-import static org.junit.Assert.*;
+import org.junit.Assert;

 import java.io.IOException;

-import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.junit.Test;

 public class TestKerberosUtil {
@@ -32,23 +31,23 @@ public class TestKerberosUtil {
     String testHost = "FooBar";

     // send null hostname
-    assertEquals("When no hostname is sent",
+    Assert.assertEquals("When no hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
-    assertEquals("When empty hostname is sent",
+    Assert.assertEquals("When empty hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
-    assertEquals("When 0.0.0.0 hostname is sent",
+    Assert.assertEquals("When 0.0.0.0 hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
-    assertEquals("When uppercase hostname is sent",
+    Assert.assertEquals("When uppercase hostname is sent",
         service + "/" + testHost.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
-    assertEquals("When lowercase hostname is sent",
+    Assert.assertEquals("When lowercase hostname is sent",
         service + "/" + testHost.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
   }
@@ -13,68 +13,75 @@
  */
 package org.apache.hadoop.security.authentication.util;

-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;

-public class TestSigner extends TestCase {
+public class TestSigner {

+  @Test
   public void testNoSecret() throws Exception {
     try {
       new Signer(null);
-      fail();
+      Assert.fail();
     }
     catch (IllegalArgumentException ex) {
     }
   }

+  @Test
   public void testNullAndEmptyString() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     try {
       signer.sign(null);
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     try {
       signer.sign("");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
   }

+  @Test
   public void testSignature() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     String s1 = signer.sign("ok");
     String s2 = signer.sign("ok");
     String s3 = signer.sign("wrong");
-    assertEquals(s1, s2);
-    assertNotSame(s1, s3);
+    Assert.assertEquals(s1, s2);
+    Assert.assertNotSame(s1, s3);
   }

+  @Test
   public void testVerify() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     String t = "test";
     String s = signer.sign(t);
     String e = signer.verifyAndExtract(s);
-    assertEquals(t, e);
+    Assert.assertEquals(t, e);
   }

+  @Test
   public void testInvalidSignedText() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     try {
       signer.verifyAndExtract("test");
-      fail();
+      Assert.fail();
     } catch (SignerException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
   }

+  @Test
   public void testTampering() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     String t = "test";
@@ -82,12 +89,11 @@ public class TestSigner extends TestCase {
     s += "x";
     try {
       signer.verifyAndExtract(s);
-      fail();
+      Assert.fail();
     } catch (SignerException ex) {
       // Expected
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
   }

 }
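The Signer tests above exercise sign() and verifyAndExtract(); only the assertion style changes in this commit. A short sketch of the round trip those tests cover, using exactly the API calls shown above (the payload string is illustrative):

import org.apache.hadoop.security.authentication.util.Signer;
import org.apache.hadoop.security.authentication.util.SignerException;

public class SignerRoundTripSketch {
  public static void main(String[] args) throws Exception {
    Signer signer = new Signer("secret".getBytes());

    String signed = signer.sign("cookie-payload");        // payload plus an appended signature
    String original = signer.verifyAndExtract(signed);    // returns the payload if the signature matches
    System.out.println("round trip ok: " + original.equals("cookie-payload"));

    try {
      signer.verifyAndExtract(signed + "x");              // tampered input must be rejected
    } catch (SignerException expected) {
      System.out.println("tampering detected");
    }
  }
}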
@@ -1,28 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-[libdefaults]
-  default_realm = ${kerberos.realm}
-  udp_preference_limit = 1
-  extra_addresses = 127.0.0.1
-[realms]
-  ${kerberos.realm} = {
-    admin_server = localhost:88
-    kdc = localhost:88
-  }
-[domain_realm]
-  localhost = ${kerberos.realm}
@@ -105,8 +105,6 @@ Trunk (Unreleased)

     HADOOP-9833 move slf4j to version 1.7.5 (Kousuke Saruta via stevel)

-    HADOOP-9845. Update protobuf to 2.5 from 2.4.x. (tucu)
-
   BUG FIXES

     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -293,6 +291,11 @@ Release 2.3.0 - UNRELEASED

   IMPROVEMENTS

+    HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)
+
+    HADOOP 9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
+    (Junping Du via llu)
+
     HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)

     HADOOP-9241. DU refresh interval is not configurable (harsh)
@@ -315,6 +318,15 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9848. Create a MiniKDC for use with security testing.
     (ywskycn via tucu)

+    HADOOP-9860. Remove class HackedKeytab and HackedKeytabEncoder from
+    hadoop-minikdc once jira DIRSERVER-1882 solved. (ywskycn via tucu)
+
+    HADOOP-9866. convert hadoop-auth testcases requiring kerberos to
+    use minikdc. (ywskycn via tucu)
+
+    HADOOP-9487 Deprecation warnings in Configuration should go to their
+    own log or otherwise be suppressible (Chu Tong via stevel)
+
   OPTIMIZATIONS

     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -330,6 +342,18 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9817. FileSystem#globStatus and FileContext#globStatus need to work
     with symlinks. (Colin Patrick McCabe via Andrew Wang)

+    HADOOP-9652. RawLocalFs#getFileLinkStatus does not fill in the link owner
+    and mode. (Andrew Wang via Colin Patrick McCabe)
+
+    HADOOP-9875. TestDoAsEffectiveUser can fail on JDK 7. (Aaron T. Myers via
+    Colin Patrick McCabe)
+
+    HADOOP-9865. FileContext#globStatus has a regression with respect to
+    relative path. (Chuan Lin via Colin Patrick McCabe)
+
+    HADOOP-9877. Fix listing of snapshot directories in globStatus.
+    (Binglin Chang via Andrew Wang)
+
 Release 2.1.1-beta - UNRELEASED

   INCOMPATIBLE CHANGES
@@ -358,6 +382,11 @@ Release 2.1.1-beta - UNRELEASED

     HADOOP-9802. Support Snappy codec on Windows. (cnauroth)

+    HADOOP-9879. Move the version info of zookeeper dependencies to
+    hadoop-project/pom (Karthik Kambatla via Sandy Ryza)
+
+    HADOOP-9886. Turn warning message in RetryInvocationHandler to debug (arpit)
+
   OPTIMIZATIONS

   BUG FIXES
@@ -384,7 +413,14 @@ Release 2.1.1-beta - UNRELEASED
     HADOOP-9857. Tests block and sometimes timeout on Windows due to invalid
     entropy source. (cnauroth)

-Release 2.1.0-beta - 2013-08-06
+    HADOOP-9381. Document dfs cp -f option. (Keegan Witt, suresh via suresh)
+
+    HADOOP-9868. Server must not advertise kerberos realm. (daryn via kihwal)
+
+    HADOOP-9880. SASL changes from HADOOP-9421 breaks Secure HA NN. (daryn via
+    jing9)
+
+Release 2.1.0-beta - 2013-08-22

   INCOMPATIBLE CHANGES

@@ -581,6 +617,10 @@ Release 2.1.0-beta - 2013-08-06
     HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs
     (todd)

+    HADOOP-9845. Update protobuf to 2.5 from 2.4.x. (tucu)
+
+    HADOOP-9872. Improve protoc version handling and detection. (tucu)
+
   BUG FIXES

     HADOOP-9294. GetGroupsTestBase fails on Windows. (Chris Nauroth via suresh)
@@ -2038,6 +2078,9 @@ Release 0.23.10 - UNRELEASED

   IMPROVEMENTS

+    HADOOP-9686. Easy access to final parameters in Configuration (Jason Lowe
+    via jeagles)
+
   OPTIMIZATIONS

   BUG FIXES
@@ -217,7 +217,6 @@
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <exclusions>
         <exclusion>
           <groupId>jline</groupId>
@@ -245,7 +244,6 @@
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
@@ -309,6 +307,7 @@
             </goals>
             <configuration>
               <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
               <imports>
                 <param>${basedir}/src/main/proto</param>
               </imports>
@@ -338,6 +337,7 @@
             </goals>
             <configuration>
               <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
               <imports>
                 <param>${basedir}/src/test/proto</param>
               </imports>
@@ -130,6 +130,13 @@ log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
 log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd

+#
+# hadoop configuration logging
+#
+
+# Uncomment the following line to turn off configuration deprecation warnings.
+# log4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN
+
 #
 # hdfs audit logging
 #
@@ -231,4 +238,5 @@ log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 #log4j.appender.HSAUDIT.File=${hadoop.log.dir}/hs-audit.log
 #log4j.appender.HSAUDIT.layout=org.apache.log4j.PatternLayout
 #log4j.appender.HSAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 #log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd
+
@@ -153,6 +153,10 @@ import com.google.common.base.Preconditions;
  * will be resolved to another property in this Configuration, while
  * <tt>${<i>user.name</i>}</tt> would then ordinarily be resolved to the value
  * of the System property with that name.
+ * By default, warnings will be given to any deprecated configuration
+ * parameters and these are suppressible by configuring
+ * <tt>log4j.logger.org.apache.hadoop.conf.Configuration.deprecation</tt> in
+ * log4j.properties file.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -161,6 +165,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private static final Log LOG =
     LogFactory.getLog(Configuration.class);

+  private static final Log LOG_DEPRECATION =
+    LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation");
+
   private boolean quietmode = true;

   private static class Resource {
@@ -836,7 +843,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private void warnOnceIfDeprecated(String name) {
     DeprecatedKeyInfo keyInfo = deprecatedKeyMap.get(name);
     if (keyInfo != null && !keyInfo.accessed) {
-      LOG.warn(keyInfo.getWarningMessage(name));
+      LOG_DEPRECATION.info(keyInfo.getWarningMessage(name));
     }
   }

@@ -1911,6 +1918,15 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
   }

+  /**
+   * Get the set of parameters marked final.
+   *
+   * @return final parameter set.
+   */
+  public Set<String> getFinalParameters() {
+    return new HashSet<String>(finalParameters);
+  }
+
   protected synchronized Properties getProps() {
     if (properties == null) {
       properties = new Properties();
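The Configuration hunks above add a public getFinalParameters() accessor and route deprecated-key warnings to a dedicated logger. A small usage sketch (not from this commit; the configuration key name is illustrative):

import org.apache.hadoop.conf.Configuration;

public class FinalParametersSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Keys declared with <final>true</final> in the loaded resources show up here.
    for (String name : conf.getFinalParameters()) {
      System.out.println("final: " + name);
    }

    // Deprecated-key warnings are now emitted through the logger
    // "org.apache.hadoop.conf.Configuration.deprecation", so log4j.properties can
    // silence them without muting the rest of Configuration's logging.
    conf.get("some.possibly.deprecated.key");   // illustrative key name
  }
}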
@@ -113,7 +113,14 @@ public abstract class DelegateToFileSystem extends AbstractFileSystem {

   @Override
   public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    return getFileStatus(f);
+    FileStatus status = fsImpl.getFileLinkStatus(f);
+    // FileSystem#getFileLinkStatus qualifies the link target
+    // AbstractFileSystem needs to return it plain since it's qualified
+    // in FileContext, so re-get and set the plain target
+    if (status.isSymlink()) {
+      status.setSymlink(fsImpl.getLinkTarget(f));
+    }
+    return status;
   }

   @Override
@@ -199,22 +206,18 @@ public abstract class DelegateToFileSystem extends AbstractFileSystem {

   @Override
   public boolean supportsSymlinks() {
-    return false;
+    return fsImpl.supportsSymlinks();
   }

   @Override
   public void createSymlink(Path target, Path link, boolean createParent)
       throws IOException {
-    throw new IOException("File system does not support symlinks");
+    fsImpl.createSymlink(target, link, createParent);
   }

   @Override
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an
-     * UnresolvedLinkException, causing this function to be called,
-     * should override getLinkTarget.
-     */
-    throw new AssertionError();
+    return fsImpl.getLinkTarget(f);
   }

   @Override //AbstractFileSystem
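With the delegation above, an AbstractFileSystem wrapper forwards symlink operations to the wrapped FileSystem instead of rejecting them. A hedged sketch of the caller-visible effect through FileContext on the local filesystem (paths are illustrative, and whether the underlying local FS actually supports symlinks depends on the platform):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

public class SymlinkDelegationSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getLocalFSFileContext(new Configuration());
    Path target = new Path("/tmp/symlink-demo/target.txt");   // illustrative paths
    Path link = new Path("/tmp/symlink-demo/link.txt");

    // These calls are now forwarded to the wrapped FileSystem instead of failing
    // with "File system does not support symlinks".
    fc.createSymlink(target, link, true /* createParent */);
    FileStatus linkStatus = fc.getFileLinkStatus(link);
    System.out.println("symlink=" + linkStatus.isSymlink()
        + " target=" + linkStatus.getSymlink());
  }
}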
@@ -62,6 +62,18 @@ class Globber {
     }
   }

+  private FileStatus getFileLinkStatus(Path path) {
+    try {
+      if (fs != null) {
+        return fs.getFileLinkStatus(path);
+      } else {
+        return fc.getFileLinkStatus(path);
+      }
+    } catch (IOException e) {
+      return null;
+    }
+  }
+
   private FileStatus[] listStatus(Path path) {
     try {
       if (fs != null) {
@@ -99,29 +111,41 @@ class Globber {
   }

   private String schemeFromPath(Path path) throws IOException {
-    String scheme = pathPattern.toUri().getScheme();
+    String scheme = path.toUri().getScheme();
     if (scheme == null) {
       if (fs != null) {
         scheme = fs.getUri().getScheme();
       } else {
-        scheme = fc.getFSofPath(path).getUri().getScheme();
+        scheme = fc.getDefaultFileSystem().getUri().getScheme();
       }
     }
     return scheme;
   }

   private String authorityFromPath(Path path) throws IOException {
-    String authority = pathPattern.toUri().getAuthority();
+    String authority = path.toUri().getAuthority();
     if (authority == null) {
       if (fs != null) {
         authority = fs.getUri().getAuthority();
       } else {
-        authority = fc.getFSofPath(path).getUri().getAuthority();
+        authority = fc.getDefaultFileSystem().getUri().getAuthority();
       }
     }
     return authority ;
   }

+  /**
+   * The glob filter builds a regexp per path component.  If the component
+   * does not contain a shell metachar, then it falls back to appending the
+   * raw string to the list of built up paths.  This raw path needs to have
+   * the quoting removed.  Ie. convert all occurrences of "\X" to "X"
+   * @param name of the path component
+   * @return the unquoted path component
+   */
+  private static String unquotePathComponent(String name) {
+    return name.replaceAll("\\\\(.)", "$1");
+  }
+
   public FileStatus[] glob() throws IOException {
     // First we get the scheme and authority of the pattern that was passed
     // in.
@@ -176,14 +200,30 @@ class Globber {
             resolvedCandidate.isDirectory() == false) {
           continue;
         }
-        FileStatus[] children = listStatus(candidate.getPath());
-        for (FileStatus child : children) {
-          // Set the child path based on the parent path.
-          // This keeps the symlinks in our path.
-          child.setPath(new Path(candidate.getPath(),
-              child.getPath().getName()));
-          if (globFilter.accept(child.getPath())) {
-            newCandidates.add(child);
+        // For components without pattern, we get its FileStatus directly
+        // using getFileLinkStatus for two reasons:
+        // 1. It should be faster to only get FileStatus needed rather than
+        //    get all children.
+        // 2. Some special filesystem directories (e.g. HDFS snapshot
+        //    directories) are not returned by listStatus, but do exist if
+        //    checked explicitly via getFileLinkStatus.
+        if (globFilter.hasPattern()) {
+          FileStatus[] children = listStatus(candidate.getPath());
+          for (FileStatus child : children) {
+            // Set the child path based on the parent path.
+            // This keeps the symlinks in our path.
+            child.setPath(new Path(candidate.getPath(),
+                child.getPath().getName()));
+            if (globFilter.accept(child.getPath())) {
+              newCandidates.add(child);
+            }
+          }
+        } else {
+          Path p = new Path(candidate.getPath(), unquotePathComponent(component));
+          FileStatus s = getFileLinkStatus(p);
+          if (s != null) {
+            s.setPath(p);
+            newCandidates.add(s);
           }
         }
       }
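The glob() change above resolves literal (non-wildcard) path components with getFileLinkStatus() instead of listing the parent, which is both cheaper and lets paths that listStatus does not report, such as HDFS ".snapshot" directories, still match. A hedged sketch of the caller-visible behaviour (the paths are illustrative and not from this commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GlobSnapshotSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());

    // ".snapshot" is a literal component that a plain listStatus() of the parent
    // does not return; the globber now probes such components directly, so the
    // pattern below can still resolve against a snapshottable directory.
    FileStatus[] matches = fs.globStatus(new Path("/data/.snapshot/snap1/part-*"));
    if (matches != null) {
      for (FileStatus status : matches) {
        System.out.println(status.getPath());
      }
    }
  }
}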
@@ -41,15 +41,6 @@ import org.apache.hadoop.util.Shell;
  */
 public class HardLink {
-
-  public enum OSType {
-    OS_TYPE_UNIX,
-    OS_TYPE_WIN,
-    OS_TYPE_SOLARIS,
-    OS_TYPE_MAC,
-    OS_TYPE_FREEBSD
-  }
-
-  public static OSType osType;
   private static HardLinkCommandGetter getHardLinkCommand;
 
   public final LinkStats linkStats; //not static

@@ -57,19 +48,18 @@ public class HardLink {
   //initialize the command "getters" statically, so can use their
   //methods without instantiating the HardLink object
   static {
-    osType = getOSType();
-    if (osType == OSType.OS_TYPE_WIN) {
+    if (Shell.WINDOWS) {
       // Windows
       getHardLinkCommand = new HardLinkCGWin();
     } else {
-      // Unix
+      // Unix or Linux
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
+      if (Shell.MAC || Shell.FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
-      } else if (osType == OSType.OS_TYPE_SOLARIS) {
+      } else if (Shell.SOLARIS) {
         String[] linkCountCmdTemplate = {"ls","-l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
       }

@@ -80,26 +70,6 @@ public class HardLink {
     linkStats = new LinkStats();
   }
-
-  static private OSType getOSType() {
-    String osName = System.getProperty("os.name");
-    if (Shell.WINDOWS) {
-      return OSType.OS_TYPE_WIN;
-    }
-    else if (osName.contains("SunOS")
-            || osName.contains("Solaris")) {
-      return OSType.OS_TYPE_SOLARIS;
-    }
-    else if (osName.contains("Mac")) {
-      return OSType.OS_TYPE_MAC;
-    }
-    else if (osName.contains("FreeBSD")) {
-      return OSType.OS_TYPE_FREEBSD;
-    }
-    else {
-      return OSType.OS_TYPE_UNIX;
-    }
-  }
-
   /**
    * This abstract class bridges the OS-dependent implementations of the
    * needed functionality for creating hardlinks and querying link counts.

@@ -548,7 +518,7 @@ public class HardLink {
       if (inpMsg == null || exitValue != 0) {
         throw createIOException(fileName, inpMsg, errMsg, exitValue, null);
       }
-      if (osType == OSType.OS_TYPE_SOLARIS) {
+      if (Shell.SOLARIS) {
         String[] result = inpMsg.split("\\s+");
         return Integer.parseInt(result[1]);
       } else {
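The HardLink change replaces the class's private OS detection with the shared Shell.MAC / Shell.FREEBSD / Shell.SOLARIS constants when picking a link-count command template. A small illustrative sketch of that selection logic follows; the LinkCountCommand class is hypothetical and the Windows case (handled by HardLinkCGWin in the real code) is omitted.

import org.apache.hadoop.util.Shell;

class LinkCountCommand {
  // Pick the stat/ls template used to read a file's hard-link count,
  // branching on the centralized Shell platform constants.
  static String[] template() {
    if (Shell.MAC || Shell.FREEBSD) {
      return new String[] {"/usr/bin/stat", "-f%l", null};
    } else if (Shell.SOLARIS) {
      return new String[] {"ls", "-l", null};
    }
    // Linux is already the default template in HardLinkCGUnix.
    return new String[] {"stat", "-c%h", null};
  }
}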
@@ -51,6 +51,7 @@ import org.apache.hadoop.util.StringUtils;
 public class RawLocalFileSystem extends FileSystem {
   static final URI NAME = URI.create("file:///");
   private Path workingDir;
+  private static final boolean useDeprecatedFileStatus = !Stat.isAvailable();
 
   public RawLocalFileSystem() {
     workingDir = getInitialWorkingDirectory();

@@ -385,8 +386,11 @@ public class RawLocalFileSystem extends FileSystem {
       throw new FileNotFoundException("File " + f + " does not exist");
     }
     if (localf.isFile()) {
+      if (!useDeprecatedFileStatus) {
+        return new FileStatus[] { getFileStatus(f) };
+      }
       return new FileStatus[] {
-        new RawLocalFileStatus(localf, getDefaultBlockSize(f), this) };
+        new DeprecatedRawLocalFileStatus(localf, getDefaultBlockSize(f), this)};
     }
 
     File[] names = localf.listFiles();

@@ -516,15 +520,22 @@ public class RawLocalFileSystem extends FileSystem {
 
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
+    return getFileLinkStatusInternal(f, true);
+  }
+
+  @Deprecated
+  private FileStatus deprecatedGetFileStatus(Path f) throws IOException {
     File path = pathToFile(f);
     if (path.exists()) {
-      return new RawLocalFileStatus(pathToFile(f), getDefaultBlockSize(f), this);
+      return new DeprecatedRawLocalFileStatus(pathToFile(f),
+          getDefaultBlockSize(f), this);
     } else {
       throw new FileNotFoundException("File " + f + " does not exist");
     }
   }
 
-  static class RawLocalFileStatus extends FileStatus {
+  @Deprecated
+  static class DeprecatedRawLocalFileStatus extends FileStatus {
     /* We can add extra fields here. It breaks at least CopyFiles.FilePair().
      * We recognize if the information is already loaded by check if
      * onwer.equals("").

@@ -533,7 +544,7 @@ public class RawLocalFileSystem extends FileSystem {
       return !super.getOwner().isEmpty();
     }
 
-    RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
+    DeprecatedRawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
       super(f.length(), f.isDirectory(), 1, defaultBlockSize,
           f.lastModified(), new Path(f.getPath()).makeQualified(fs.getUri(),
             fs.getWorkingDirectory()));

@@ -699,7 +710,7 @@ public class RawLocalFileSystem extends FileSystem {
    */
   @Override
   public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    FileStatus fi = getFileLinkStatusInternal(f);
+    FileStatus fi = getFileLinkStatusInternal(f, false);
     // getFileLinkStatus is supposed to return a symlink with a
     // qualified path
     if (fi.isSymlink()) {

@@ -710,7 +721,35 @@ public class RawLocalFileSystem extends FileSystem {
     return fi;
   }
 
-  private FileStatus getFileLinkStatusInternal(final Path f) throws IOException {
+  /**
+   * Public {@link FileStatus} methods delegate to this function, which in turn
+   * either call the new {@link Stat} based implementation or the deprecated
+   * methods based on platform support.
+   *
+   * @param f Path to stat
+   * @param dereference whether to dereference the final path component if a
+   *          symlink
+   * @return FileStatus of f
+   * @throws IOException
+   */
+  private FileStatus getFileLinkStatusInternal(final Path f,
+      boolean dereference) throws IOException {
+    if (!useDeprecatedFileStatus) {
+      return getNativeFileLinkStatus(f, dereference);
+    } else if (dereference) {
+      return deprecatedGetFileStatus(f);
+    } else {
+      return deprecatedGetFileLinkStatusInternal(f);
+    }
+  }
+
+  /**
+   * Deprecated. Remains for legacy support. Should be removed when {@link Stat}
+   * gains support for Windows and other operating systems.
+   */
+  @Deprecated
+  private FileStatus deprecatedGetFileLinkStatusInternal(final Path f)
+      throws IOException {
     String target = FileUtil.readLink(new File(f.toString()));
 
     try {

@@ -746,10 +785,31 @@ public class RawLocalFileSystem extends FileSystem {
       throw e;
     }
   }
+
+  /**
+   * Calls out to platform's native stat(1) implementation to get file metadata
+   * (permissions, user, group, atime, mtime, etc). This works around the lack
+   * of lstat(2) in Java 6.
+   *
+   * Currently, the {@link Stat} class used to do this only supports Linux
+   * and FreeBSD, so the old {@link #deprecatedGetFileLinkStatusInternal(Path)}
+   * implementation (deprecated) remains further OS support is added.
+   *
+   * @param f File to stat
+   * @param dereference whether to dereference symlinks
+   * @return FileStatus of f
+   * @throws IOException
+   */
+  private FileStatus getNativeFileLinkStatus(final Path f,
+      boolean dereference) throws IOException {
+    checkPath(f);
+    Stat stat = new Stat(f, getDefaultBlockSize(f), dereference, this);
+    FileStatus status = stat.getFileStatus();
+    return status;
+  }
 
   @Override
   public Path getLinkTarget(Path f) throws IOException {
-    FileStatus fi = getFileLinkStatusInternal(f);
+    FileStatus fi = getFileLinkStatusInternal(f, false);
     // return an unqualified symlink target
     return fi.getSymlink();
   }
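With this change the same local path can be examined with and without dereferencing, and both public entry points funnel into getFileLinkStatusInternal(f, dereference). A small caller-side sketch, assuming a local path is passed on the command line:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LocalStatusDemo {
  public static void main(String[] args) throws Exception {
    FileSystem local = FileSystem.getLocal(new Configuration());
    Path p = new Path(args[0]);
    FileStatus resolved = local.getFileStatus(p);   // dereference = true
    FileStatus link = local.getFileLinkStatus(p);   // dereference = false
    System.out.println("resolved isSymlink=" + resolved.isSymlink()
        + ", link isSymlink=" + link.isSymlink());
  }
}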
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.NoSuchElementException;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Wrapper for the Unix stat(1) command. Used to workaround the lack of
+ * lstat(2) in Java 6.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class Stat extends Shell {
+
+  private final Path original;
+  private final Path qualified;
+  private final Path path;
+  private final long blockSize;
+  private final boolean dereference;
+
+  private FileStatus stat;
+
+  public Stat(Path path, long blockSize, boolean deref, FileSystem fs)
+      throws IOException {
+    super(0L, true);
+    // Original path
+    this.original = path;
+    // Qualify the original and strip out URI fragment via toUri().getPath()
+    Path stripped = new Path(
+        original.makeQualified(fs.getUri(), fs.getWorkingDirectory())
+        .toUri().getPath());
+    // Re-qualify the bare stripped path and store it
+    this.qualified =
+        stripped.makeQualified(fs.getUri(), fs.getWorkingDirectory());
+    // Strip back down to a plain path
+    this.path = new Path(qualified.toUri().getPath());
+    this.blockSize = blockSize;
+    this.dereference = deref;
+  }
+
+  public FileStatus getFileStatus() throws IOException {
+    run();
+    return stat;
+  }
+
+  /**
+   * Whether Stat is supported on the current platform
+   * @return
+   */
+  public static boolean isAvailable() {
+    if (Shell.LINUX || Shell.FREEBSD) {
+      return true;
+    }
+    return false;
+  }
+
+  @VisibleForTesting
+  FileStatus getFileStatusForTesting() {
+    return stat;
+  }
+
+  @Override
+  protected String[] getExecString() {
+    String derefFlag = "-";
+    if (dereference) {
+      derefFlag = "-L";
+    }
+    if (Shell.LINUX) {
+      return new String[] {
+          "stat", derefFlag + "c", "%s,%F,%Y,%X,%a,%U,%G,%N", path.toString() };
+    } else if (Shell.FREEBSD) {
+      return new String[] {
+          "stat", derefFlag + "f", "%z,%HT,%m,%a,%Op,%Su,%Sg,`link' -> `%Y'",
+          path.toString() };
+    } else {
+      throw new UnsupportedOperationException(
+          "stat is not supported on this platform");
+    }
+  }
+
+  @Override
+  protected void parseExecResult(BufferedReader lines) throws IOException {
+    // Reset stat
+    stat = null;
+
+    String line = lines.readLine();
+    if (line == null) {
+      throw new IOException("Unable to stat path: " + original);
+    }
+    if (line.endsWith("No such file or directory") ||
+        line.endsWith("Not a directory")) {
+      throw new FileNotFoundException("File " + original + " does not exist");
+    }
+    if (line.endsWith("Too many levels of symbolic links")) {
+      throw new IOException("Possible cyclic loop while following symbolic" +
+          " link " + original);
+    }
+    // 6,symbolic link,6,1373584236,1373584236,lrwxrwxrwx,andrew,andrew,`link' -> `target'
+    StringTokenizer tokens = new StringTokenizer(line, ",");
+    try {
+      long length = Long.parseLong(tokens.nextToken());
+      boolean isDir = tokens.nextToken().equalsIgnoreCase("directory") ? true
+          : false;
+      // Convert from seconds to milliseconds
+      long modTime = Long.parseLong(tokens.nextToken())*1000;
+      long accessTime = Long.parseLong(tokens.nextToken())*1000;
+      String octalPerms = tokens.nextToken();
+      // FreeBSD has extra digits beyond 4, truncate them
+      if (octalPerms.length() > 4) {
+        int len = octalPerms.length();
+        octalPerms = octalPerms.substring(len-4, len);
+      }
+      FsPermission perms = new FsPermission(Short.parseShort(octalPerms, 8));
+      String owner = tokens.nextToken();
+      String group = tokens.nextToken();
+      String symStr = tokens.nextToken();
+      // 'notalink'
+      // 'link' -> `target'
+      // '' -> ''
+      Path symlink = null;
+      StringTokenizer symTokens = new StringTokenizer(symStr, "`");
+      symTokens.nextToken();
+      try {
+        String target = symTokens.nextToken();
+        target = target.substring(0, target.length()-1);
+        if (!target.isEmpty()) {
+          symlink = new Path(target);
+        }
+      } catch (NoSuchElementException e) {
+        // null if not a symlink
+      }
+      // Set stat
+      stat = new FileStatus(length, isDir, 1, blockSize, modTime, accessTime,
+          perms, owner, group, symlink, qualified);
+    } catch (NumberFormatException e) {
+      throw new IOException("Unexpected stat output: " + line, e);
+    } catch (NoSuchElementException e) {
+      throw new IOException("Unexpected stat output: " + line, e);
+    }
+  }
+}
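A minimal usage sketch for the new class, assuming a Linux or FreeBSD host and a path supplied on the command line (Stat is LimitedPrivate, so ordinary applications would normally go through FileSystem instead):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Stat;

public class StatDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    if (!Stat.isAvailable()) {
      System.err.println("stat(1) wrapper is not supported on this platform");
      return;
    }
    Path p = new Path(args[0]);
    // Fork stat(1) once and read the result back as a FileStatus,
    // without dereferencing a trailing symlink.
    Stat stat = new Stat(p, fs.getDefaultBlockSize(p), false, fs);
    FileStatus status = stat.getFileStatus();
    System.out.println(status.getPermission() + " " + status.getOwner()
        + " " + status.getGroup());
  }
}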
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.fs.local;
 
-import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;

@@ -28,13 +26,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.AbstractFileSystem;
 import org.apache.hadoop.fs.DelegateToFileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.FsServerDefaults;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.fs.permission.FsPermission;
 
 /**
  * The RawLocalFs implementation of AbstractFileSystem.

@@ -72,90 +66,12 @@ public class RawLocalFs extends DelegateToFileSystem {
   public FsServerDefaults getServerDefaults() throws IOException {
     return LocalConfigKeys.getServerDefaults();
   }
 
   @Override
-  public boolean supportsSymlinks() {
+  public boolean isValidName(String src) {
+    // Different local file systems have different validation rules. Skip
+    // validation here and just let the OS handle it. This is consistent with
+    // RawLocalFileSystem.
     return true;
   }
-
-  @Override
-  public void createSymlink(Path target, Path link, boolean createParent)
-      throws IOException {
-    final String targetScheme = target.toUri().getScheme();
-    if (targetScheme != null && !"file".equals(targetScheme)) {
-      throw new IOException("Unable to create symlink to non-local file "+
-                            "system: "+target.toString());
-    }
-
-    if (createParent) {
-      mkdir(link.getParent(), FsPermission.getDirDefault(), true);
-    }
-
-    // NB: Use createSymbolicLink in java.nio.file.Path once available
-    int result = FileUtil.symLink(target.toString(), link.toString());
-    if (result != 0) {
-      throw new IOException("Error " + result + " creating symlink " +
-          link + " to " + target);
-    }
-  }
-
-  /**
-   * Return a FileStatus representing the given path. If the path refers
-   * to a symlink return a FileStatus representing the link rather than
-   * the object the link refers to.
-   */
-  @Override
-  public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    String target = FileUtil.readLink(new File(f.toString()));
-    try {
-      FileStatus fs = getFileStatus(f);
-      // If f refers to a regular file or directory
-      if (target.isEmpty()) {
-        return fs;
-      }
-      // Otherwise f refers to a symlink
-      return new FileStatus(fs.getLen(),
-          false,
-          fs.getReplication(),
-          fs.getBlockSize(),
-          fs.getModificationTime(),
-          fs.getAccessTime(),
-          fs.getPermission(),
-          fs.getOwner(),
-          fs.getGroup(),
-          new Path(target),
-          f);
-    } catch (FileNotFoundException e) {
-      /* The exists method in the File class returns false for dangling
-       * links so we can get a FileNotFoundException for links that exist.
-       * It's also possible that we raced with a delete of the link. Use
-       * the readBasicFileAttributes method in java.nio.file.attributes
-       * when available.
-       */
-      if (!target.isEmpty()) {
-        return new FileStatus(0, false, 0, 0, 0, 0, FsPermission.getDefault(),
-            "", "", new Path(target), f);
-      }
-      // f refers to a file or directory that does not exist
-      throw e;
-    }
-  }
-
-  @Override
-  public boolean isValidName(String src) {
-    // Different local file systems have different validation rules. Skip
-    // validation here and just let the OS handle it. This is consistent with
-    // RawLocalFileSystem.
-    return true;
-  }
-
-  @Override
-  public Path getLinkTarget(Path f) throws IOException {
-    /* We should never get here. Valid local links are resolved transparently
-     * by the underlying local file system and accessing a dangling link will
-     * result in an IOException, not an UnresolvedLinkException, so FileContext
-     * should never call this function.
-     */
-    throw new AssertionError();
-  }
 }
@@ -133,7 +133,8 @@ class CopyCommands {
       "Copy files that match the file pattern <src> to a\n" +
       "destination.  When copying multiple files, the destination\n" +
       "must be a directory. Passing -p preserves access and\n" +
-      "modification times, ownership and the mode.\n";
+      "modification times, ownership and the mode. Passing -f\n" +
+      "overwrites the destination if it already exists.\n";
 
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {

@@ -186,7 +187,8 @@ class CopyCommands {
       "into fs. Copying fails if the file already\n" +
       "exists, unless the -f flag is given. Passing\n" +
       "-p preserves access and modification times,\n" +
-      "ownership and the mode.\n";
+      "ownership and the mode. Passing -f overwrites\n" +
+      "the destination if it already exists.\n";
 
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
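The help text above documents the -f (overwrite) flag for cp and copyFromLocal. A short sketch of exercising the same option programmatically through FsShell; the paths are placeholders, not values from this patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class CopyOverwriteDemo {
  public static void main(String[] args) throws Exception {
    // Equivalent to: hdfs dfs -cp -f /user/hadoop/file1 /user/hadoop/file2
    int rc = ToolRunner.run(new FsShell(new Configuration()),
        new String[] {"-cp", "-f", "/user/hadoop/file1", "/user/hadoop/file2"});
    System.exit(rc);
  }
}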
@@ -79,7 +79,7 @@ class SnapshotCommands extends FsCommand {
     protected void processArguments(LinkedList<PathData> items)
     throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       assert(items.size() == 1);

@@ -119,7 +119,7 @@ class SnapshotCommands extends FsCommand {
     protected void processArguments(LinkedList<PathData> items)
     throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       assert (items.size() == 1);

@@ -160,7 +160,7 @@ class SnapshotCommands extends FsCommand {
     protected void processArguments(LinkedList<PathData> items)
     throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       Preconditions.checkArgument(items.size() == 1);
@@ -47,6 +47,7 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;

@@ -119,18 +120,117 @@ public class HttpServer implements FilterContainer {
   protected final Map<Context, Boolean> defaultContexts =
       new HashMap<Context, Boolean>();
   protected final List<String> filterNames = new ArrayList<String>();
-  private static final int MAX_RETRIES = 10;
   static final String STATE_DESCRIPTION_ALIVE = " - alive";
   static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
 
   private final boolean listenerStartedExternally;
 
+  /**
+   * Class to construct instances of HTTP server with specific options.
+   */
+  public static class Builder {
+    String name;
+    String bindAddress;
+    Integer port;
+    Boolean findPort;
+    Configuration conf;
+    Connector connector;
+    String[] pathSpecs;
+    AccessControlList adminsAcl;
+    boolean securityEnabled = false;
+    String usernameConfKey = null;
+    String keytabConfKey = null;
+
+    public Builder setName(String name){
+      this.name = name;
+      return this;
+    }
+
+    public Builder setBindAddress(String bindAddress){
+      this.bindAddress = bindAddress;
+      return this;
+    }
+
+    public Builder setPort(int port) {
+      this.port = port;
+      return this;
+    }
+
+    public Builder setFindPort(boolean findPort) {
+      this.findPort = findPort;
+      return this;
+    }
+
+    public Builder setConf(Configuration conf) {
+      this.conf = conf;
+      return this;
+    }
+
+    public Builder setConnector(Connector connector) {
+      this.connector = connector;
+      return this;
+    }
+
+    public Builder setPathSpec(String[] pathSpec) {
+      this.pathSpecs = pathSpec;
+      return this;
+    }
+
+    public Builder setACL(AccessControlList acl) {
+      this.adminsAcl = acl;
+      return this;
+    }
+
+    public Builder setSecurityEnabled(boolean securityEnabled) {
+      this.securityEnabled = securityEnabled;
+      return this;
+    }
+
+    public Builder setUsernameConfKey(String usernameConfKey) {
+      this.usernameConfKey = usernameConfKey;
+      return this;
+    }
+
+    public Builder setKeytabConfKey(String keytabConfKey) {
+      this.keytabConfKey = keytabConfKey;
+      return this;
+    }
+
+    public HttpServer build() throws IOException {
+      if (this.name == null) {
+        throw new HadoopIllegalArgumentException("name is not set");
+      }
+      if (this.bindAddress == null) {
+        throw new HadoopIllegalArgumentException("bindAddress is not set");
+      }
+      if (this.port == null) {
+        throw new HadoopIllegalArgumentException("port is not set");
+      }
+      if (this.findPort == null) {
+        throw new HadoopIllegalArgumentException("findPort is not set");
+      }
+
+      if (this.conf == null) {
+        conf = new Configuration();
+      }
+
+      HttpServer server = new HttpServer(this.name, this.bindAddress, this.port,
+          this.findPort, this.conf, this.adminsAcl, this.connector, this.pathSpecs);
+      if (this.securityEnabled) {
+        server.initSpnego(this.conf, this.usernameConfKey, this.keytabConfKey);
+      }
+      return server;
+    }
+  }
+
   /** Same as this(name, bindAddress, port, findPort, null); */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port, boolean findPort
       ) throws IOException {
     this(name, bindAddress, port, findPort, new Configuration());
   }
 
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, Connector connector) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, connector, null);

@@ -150,6 +250,7 @@ public class HttpServer implements FilterContainer {
    * @param pathSpecs Path specifications that this httpserver will be serving.
    *        These will be added to any filters.
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, pathSpecs);

@@ -164,11 +265,13 @@ public class HttpServer implements FilterContainer {
    *        increment by 1 until it finds a free port.
    * @param conf Configuration
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, null);
   }
 
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, AccessControlList adminsAcl)
       throws IOException {

@@ -186,6 +289,7 @@ public class HttpServer implements FilterContainer {
    * @param conf Configuration
    * @param adminsAcl {@link AccessControlList} of the admins
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, AccessControlList adminsAcl,
       Connector connector) throws IOException {

@@ -529,7 +633,7 @@ public class HttpServer implements FilterContainer {
   /**
    * Define a filter for a context and set up default url mappings.
    */
-  protected void defineFilter(Context ctx, String name,
+  public void defineFilter(Context ctx, String name,
       String classname, Map<String,String> parameters, String[] urls) {
 
     FilterHolder holder = new FilterHolder();

@@ -569,6 +673,10 @@ public class HttpServer implements FilterContainer {
   public Object getAttribute(String name) {
     return webAppContext.getAttribute(name);
   }
+
+  public WebAppContext getWebAppContext(){
+    return this.webAppContext;
+  }
 
   /**
    * Get the pathname to the webapps files.
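The old constructors are deprecated in favour of the new fluent Builder, where name, bindAddress, port and findPort are mandatory and everything else is optional. A minimal construction sketch; the server name and bind address are placeholder values:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;

public class BuilderDemo {
  public static void main(String[] args) throws Exception {
    // Port 0 plus findPort=true asks the server to pick a free port.
    HttpServer server = new HttpServer.Builder()
        .setName("test")
        .setBindAddress("0.0.0.0")
        .setPort(0)
        .setFindPort(true)
        .setConf(new Configuration())
        .build();
    server.start();
    System.out.println("listening on port " + server.getPort());
    server.stop();
  }
}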
@@ -136,8 +136,6 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler {
             msg += ". Trying to fail over " + formatSleepMessage(action.delayMillis);
             if (LOG.isDebugEnabled()) {
               LOG.debug(msg, e);
-            } else {
-              LOG.warn(msg);
             }
           } else {
             if(LOG.isDebugEnabled()) {
@@ -1311,7 +1311,15 @@ public abstract class Server {
         Throwable cause = e;
         while (cause != null) {
           if (cause instanceof InvalidToken) {
-            sendToClient = (InvalidToken) cause;
+            // FIXME: hadoop method signatures are restricting the SASL
+            // callbacks to only returning InvalidToken, but some services
+            // need to throw other exceptions (ex. NN + StandyException),
+            // so for now we'll tunnel the real exceptions via an
+            // InvalidToken's cause which normally is not set
+            if (cause.getCause() != null) {
+              cause = cause.getCause();
+            }
+            sendToClient = (IOException) cause;
             break;
           }
           cause = cause.getCause();
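A small sketch of the "tunnelling" convention the new comment describes: a SASL callback that may only throw InvalidToken wraps the real failure as its cause, and the server unwraps it before replying. The wrap helper and the demo class are illustrative only, not code from this patch.

import java.io.IOException;

import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;

public class TokenTunnelDemo {
  // Carry an arbitrary IOException through an InvalidToken, whose cause is
  // normally never set, so the receiver can recover the original exception.
  static InvalidToken wrap(IOException real) {
    InvalidToken carrier = new InvalidToken("carrier for " + real.getClass().getSimpleName());
    carrier.initCause(real);
    return carrier;
  }

  public static void main(String[] args) {
    InvalidToken thrown = wrap(new StandbyException("namenode is in standby state"));
    // Same unwrapping rule as the patched Server code above.
    IOException sendToClient =
        thrown.getCause() != null ? (IOException) thrown.getCause() : thrown;
    System.out.println(sendToClient.getClass().getName());
  }
}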
@@ -46,8 +46,8 @@ public enum DefaultMetricsSystem {
   @VisibleForTesting
   volatile boolean miniClusterMode = false;
 
-  final UniqueNames mBeanNames = new UniqueNames();
-  final UniqueNames sourceNames = new UniqueNames();
+  transient final UniqueNames mBeanNames = new UniqueNames();
+  transient final UniqueNames sourceNames = new UniqueNames();
 
   /**
    * Convenience method to initialize the metrics system
@@ -104,7 +104,7 @@ public class SaslRpcServer {
     if (LOG.isDebugEnabled())
       LOG.debug("Kerberos principal name is " + fullName);
     // don't use KerberosName because we don't want auth_to_local
-    String[] parts = fullName.split("[/@]", 2);
+    String[] parts = fullName.split("[/@]", 3);
     protocol = parts[0];
     // should verify service host is present here rather than in create()
     // but lazy tests are using a UGI that isn't a SPN...

@@ -127,7 +127,6 @@ public class SaslRpcServer {
     final CallbackHandler callback;
     switch (authMethod) {
       case TOKEN: {
-        secretManager.checkAvailableForRead();
         callback = new SaslDigestCallbackHandler(secretManager, connection);
         break;
       }
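Why the split limit changed from 2 to 3: with a Kerberos principal of the form service/host@REALM, a limit of 2 leaves the host and realm glued together, while a limit of 3 yields the three parts separately. The principal below is an example value, not one from the patch.

import java.util.Arrays;

public class PrincipalSplitDemo {
  public static void main(String[] args) {
    String fullName = "nn/namenode.example.com@EXAMPLE.COM";
    // Old behaviour: [nn, namenode.example.com@EXAMPLE.COM]
    System.out.println(Arrays.toString(fullName.split("[/@]", 2)));
    // New behaviour: [nn, namenode.example.com, EXAMPLE.COM]
    System.out.println(Arrays.toString(fullName.split("[/@]", 3)));
  }
}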
@@ -58,6 +58,45 @@ abstract public class Shell {
   /** Windows CreateProcess synchronization object */
   public static final Object WindowsProcessLaunchLock = new Object();
 
+  // OSType detection
+
+  public enum OSType {
+    OS_TYPE_LINUX,
+    OS_TYPE_WIN,
+    OS_TYPE_SOLARIS,
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD,
+    OS_TYPE_OTHER
+  }
+
+  public static final OSType osType = getOSType();
+
+  static private OSType getOSType() {
+    String osName = System.getProperty("os.name");
+    if (osName.startsWith("Windows")) {
+      return OSType.OS_TYPE_WIN;
+    } else if (osName.contains("SunOS") || osName.contains("Solaris")) {
+      return OSType.OS_TYPE_SOLARIS;
+    } else if (osName.contains("Mac")) {
+      return OSType.OS_TYPE_MAC;
+    } else if (osName.contains("FreeBSD")) {
+      return OSType.OS_TYPE_FREEBSD;
+    } else if (osName.startsWith("Linux")) {
+      return OSType.OS_TYPE_LINUX;
+    } else {
+      // Some other form of Unix
+      return OSType.OS_TYPE_OTHER;
+    }
+  }
+
+  // Helper static vars for each platform
+  public static final boolean WINDOWS = (osType == OSType.OS_TYPE_WIN);
+  public static final boolean SOLARIS = (osType == OSType.OS_TYPE_SOLARIS);
+  public static final boolean MAC     = (osType == OSType.OS_TYPE_MAC);
+  public static final boolean FREEBSD = (osType == OSType.OS_TYPE_FREEBSD);
+  public static final boolean LINUX   = (osType == OSType.OS_TYPE_LINUX);
+  public static final boolean OTHER   = (osType == OSType.OS_TYPE_OTHER);
+
   /** a Unix command to get the current user's groups list */
   public static String[] getGroupsCommand() {
     return (WINDOWS)? new String[]{"cmd", "/c", "groups"}

@@ -282,13 +321,6 @@ abstract public class Shell {
     return exeFile.getCanonicalPath();
   }
 
-  /** Set to true on Windows platforms */
-  public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
-                = System.getProperty("os.name").startsWith("Windows");
-
-  public static final boolean LINUX
-                = System.getProperty("os.name").startsWith("Linux");
-
   /** a Windows utility to emulate Unix commands */
   public static final String WINUTILS = getWinUtilsPath();

@@ -336,6 +368,7 @@ abstract public class Shell {
 
   private long    interval;   // refresh interval in msec
   private long    lastTime;   // last time the command was performed
+  final private boolean redirectErrorStream; // merge stdout and stderr
   private Map<String, String> environment; // env for the command execution
   private File dir;
   private Process process; // sub process used to execute the command

@@ -348,13 +381,18 @@ abstract public class Shell {
     this(0L);
   }
 
+  public Shell(long interval) {
+    this(interval, false);
+  }
+
   /**
    * @param interval the minimum duration to wait before re-executing the
    *        command.
    */
-  public Shell( long interval ) {
+  public Shell(long interval, boolean redirectErrorStream) {
     this.interval = interval;
     this.lastTime = (interval<0) ? 0 : -interval;
+    this.redirectErrorStream = redirectErrorStream;
   }
 
   /** set the environment for the command

@@ -393,6 +431,8 @@ abstract public class Shell {
     if (dir != null) {
       builder.directory(this.dir);
     }
+
+    builder.redirectErrorStream(redirectErrorStream);
 
     if (Shell.WINDOWS) {
       synchronized (WindowsProcessLaunchLock) {
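With the OSType enum, the per-platform booleans and the os.name parsing now centralized in Shell, callers can branch on the platform without re-reading system properties. A minimal usage sketch (the printed messages are illustrative, not taken from the patch):

import org.apache.hadoop.util.Shell;

public class PlatformDemo {
  public static void main(String[] args) {
    System.out.println("osType = " + Shell.osType);
    if (Shell.WINDOWS) {
      System.out.println("winutils path: " + Shell.WINUTILS);
    } else if (Shell.LINUX || Shell.FREEBSD) {
      System.out.println("the stat(1) wrapper (Stat.isAvailable()) can be used here");
    } else if (Shell.SOLARIS || Shell.MAC || Shell.OTHER) {
      System.out.println("falling back to the generic Unix code paths");
    }
  }
}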
@@ -90,6 +90,10 @@ public class VersionInfo {
       " source checksum " + _getSrcChecksum();
   }
 
+  protected String _getProtocVersion() {
+    return info.getProperty("protocVersion", "Unknown");
+  }
+
   private static VersionInfo COMMON_VERSION_INFO = new VersionInfo("common");
   /**
    * Get the Hadoop version.

@@ -153,12 +157,20 @@ public class VersionInfo {
   public static String getBuildVersion(){
     return COMMON_VERSION_INFO._getBuildVersion();
   }
 
+  /**
+   * Returns the protoc version used for the build.
+   */
+  public static String getProtocVersion(){
+    return COMMON_VERSION_INFO._getProtocVersion();
+  }
+
   public static void main(String[] args) {
     LOG.debug("version: "+ getVersion());
     System.out.println("Hadoop " + getVersion());
     System.out.println("Subversion " + getUrl() + " -r " + getRevision());
     System.out.println("Compiled by " + getUser() + " on " + getDate());
+    System.out.println("Compiled with protoc " + getProtocVersion());
     System.out.println("From source with checksum " + getSrcChecksum());
     System.out.println("This command was run using " +
         ClassUtil.findContainingJar(VersionInfo.class));
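A short sketch of reading the new accessor from application code; it reports the protoc version that Maven filtered into the version-info properties at build time.

import org.apache.hadoop.util.VersionInfo;

public class ProtocVersionDemo {
  public static void main(String[] args) {
    System.out.println("Hadoop " + VersionInfo.getVersion()
        + ", compiled with protoc " + VersionInfo.getProtocVersion());
  }
}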
@@ -23,3 +23,4 @@ user=${user.name}
 date=${version-info.build.time}
 url=${version-info.scm.uri}
 srcChecksum=${version-info.source.md5}
+protocVersion=${protobuf.version}
@@ -42,7 +42,7 @@ Hadoop MapReduce Next Generation - CLI MiniCluster.
 $ mvn clean install -DskipTests
 $ mvn package -Pdist -Dtar -DskipTests -Dmaven.javadoc.skip
 +---+
-  <<NOTE:>> You will need protoc installed of version 2.4.1 or greater.
+  <<NOTE:>> You will need protoc 2.5.0 installed.
 
   The tarball should be available in <<<hadoop-dist/target/>>> directory.
@@ -86,11 +86,14 @@ chgrp
 
    Usage: <<<hdfs dfs -chgrp [-R] GROUP URI [URI ...]>>>
 
-   Change group association of files. With -R, make the change recursively
-   through the directory structure. The user must be the owner of files, or
+   Change group association of files. The user must be the owner of files, or
    else a super-user. Additional information is in the
    {{{betterurl}Permissions Guide}}.
 
+   Options
+
+     * The -R option will make the change recursively through the directory structure.
+
 chmod
 
    Usage: <<<hdfs dfs -chmod [-R] <MODE[,MODE]... | OCTALMODE> URI [URI ...]>>>

@@ -100,14 +103,21 @@ chmod
    else a super-user. Additional information is in the
    {{{betterurl}Permissions Guide}}.
 
+   Options
+
+     * The -R option will make the change recursively through the directory structure.
+
 chown
 
    Usage: <<<hdfs dfs -chown [-R] [OWNER][:[GROUP]] URI [URI ]>>>
 
-   Change the owner of files. With -R, make the change recursively through the
-   directory structure. The user must be a super-user. Additional information
+   Change the owner of files. The user must be a super-user. Additional information
    is in the {{{betterurl}Permissions Guide}}.
 
+   Options
+
+     * The -R option will make the change recursively through the directory structure.
+
 copyFromLocal
 
    Usage: <<<hdfs dfs -copyFromLocal <localsrc> URI>>>

@@ -115,6 +125,10 @@ copyFromLocal
    Similar to put command, except that the source is restricted to a local
    file reference.
 
+   Options:
+
+     * The -f option will overwrite the destination if it already exists.
+
 copyToLocal
 
    Usage: <<<hdfs dfs -copyToLocal [-ignorecrc] [-crc] URI <localdst> >>>

@@ -145,11 +159,15 @@ count
 
 cp
 
-   Usage: <<<hdfs dfs -cp URI [URI ...] <dest> >>>
+   Usage: <<<hdfs dfs -cp [-f] URI [URI ...] <dest> >>>
 
    Copy files from source to destination. This command allows multiple sources
    as well in which case the destination must be a directory.
 
+   Options:
+
+     * The -f option will overwrite the destination if it already exists.
+
    Example:
 
      * <<<hdfs dfs -cp /user/hadoop/file1 /user/hadoop/file2>>>

@@ -232,7 +250,7 @@ ls
 permissions number_of_replicas userid groupid filesize modification_date modification_time filename
 +---+
 
-   For a directory it returns list of its direct children as in unix.A directory is listed as:
+   For a directory it returns list of its direct children as in Unix. A directory is listed as:
 
 +---+
 permissions userid groupid modification_date modification_time dirname

@@ -256,8 +274,11 @@ mkdir
 
    Usage: <<<hdfs dfs -mkdir [-p] <paths> >>>
 
-   Takes path uri's as argument and creates directories. With -p the behavior
-   is much like unix mkdir -p creating parent directories along the path.
+   Takes path uri's as argument and creates directories.
+
+   Options:
+
+     * The -p option behavior is much like Unix mkdir -p, creating parent directories along the path.
 
    Example:

@@ -362,8 +383,11 @@ setrep
 
    Usage: <<<hdfs dfs -setrep [-R] <path> >>>
 
-   Changes the replication factor of a file. -R option is for recursively
-   increasing the replication factor of files within a directory.
+   Changes the replication factor of a file.
+
+   Options:
+
+     * The -R option will recursively increase the replication factor of files within a directory.
 
    Example:

@@ -390,8 +414,11 @@ tail
 
    Usage: <<<hdfs dfs -tail [-f] URI>>>
 
-   Displays last kilobyte of the file to stdout. -f option can be used as in
-   Unix.
+   Displays last kilobyte of the file to stdout.
+
+   Options:
+
+     * The -f option will output appended data as the file grows, as in Unix.
 
    Example:

@@ -406,13 +433,9 @@ test
 
    Options:
 
-*----+------------+
-| -e | check to see if the file exists. Return 0 if true.
-*----+------------+
-| -z | check to see if the file is zero length. Return 0 if true.
-*----+------------+
-| -d | check to see if the path is directory. Return 0 if true.
-*----+------------+
+     * The -e option will check to see if the file exists, returning 0 if true.
+
+     * The -z option will check to see if the file is zero length, returning 0 if true.
+
+     * The -d option will check to see if the path is directory, returning 0 if true.
 
    Example:
@@ -32,7 +32,7 @@ $ mvn clean install -DskipTests
 $ cd hadoop-mapreduce-project
 $ mvn clean install assembly:assembly -Pnative
 +---+
-  <<NOTE:>> You will need protoc installed of version 2.4.1 or greater.
+  <<NOTE:>> You will need protoc 2.5.0 installed.
 
   To ignore the native builds in mapreduce you can omit the <<<-Pnative>>> argument
   for maven. The tarball should be available in <<<target/>>> directory.
@@ -1272,7 +1272,23 @@ public class TestConfiguration extends TestCase {
     Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
     assertNotNull(clazz);
   }
+
+  public void testGetFinalParameters() throws Exception {
+    out=new BufferedWriter(new FileWriter(CONFIG));
+    startConfig();
+    declareProperty("my.var", "x", "x", true);
+    endConfig();
+    Path fileResource = new Path(CONFIG);
+    Configuration conf = new Configuration();
+    Set<String> finalParameters = conf.getFinalParameters();
+    assertFalse("my.var already exists", finalParameters.contains("my.var"));
+    conf.addResource(fileResource);
+    assertEquals("my.var is undefined", "x", conf.get("my.var"));
+    assertFalse("finalparams not copied", finalParameters.contains("my.var"));
+    finalParameters = conf.getFinalParameters();
+    assertTrue("my.var is not final", finalParameters.contains("my.var"));
+  }
+
   public static void main(String[] argv) throws Exception {
     junit.textui.TestRunner.main(new String[]{
       TestConfiguration.class.getName()
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.junit.After;
 import org.junit.Assert;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 

@@ -632,6 +633,20 @@ public abstract class FileContextMainOperationsBaseTest {
         filteredPaths));
   }
 
+  protected Path getHiddenPathForTest() {
+    return null;
+  }
+
+  @Test
+  public void testGlobStatusFilterWithHiddenPathTrivialFilter()
+      throws Exception {
+    Path hidden = getHiddenPathForTest();
+    Assume.assumeNotNull(hidden);
+    FileStatus[] filteredPaths = fc.util().globStatus(hidden, DEFAULT_FILTER);
+    Assert.assertNotNull(filteredPaths);
+    Assert.assertEquals(1, filteredPaths.length);
+  }
+
   @Test
   public void testWriteReadAndDeleteEmptyFile() throws Exception {
     writeReadAndDelete(0);
@@ -517,6 +517,26 @@ public class TestFsShellReturnCode {
       }
       return stat;
     }
+
+    @Override
+    public FileStatus getFileLinkStatus(Path p) throws IOException {
+      String f = makeQualified(p).toString();
+      FileStatus stat = super.getFileLinkStatus(p);
+
+      stat.getPermission();
+      if (owners.containsKey(f)) {
+        stat.setOwner("STUB-"+owners.get(f));
+      } else {
+        stat.setOwner("REAL-"+stat.getOwner());
+      }
+      if (groups.containsKey(f)) {
+        stat.setGroup("STUB-"+groups.get(f));
+      } else {
+        stat.setGroup("REAL-"+stat.getGroup());
+      }
+      return stat;
+    }
+
   }
 
   static class MyFsShell extends FsShell {
@ -26,6 +26,7 @@ import org.apache.hadoop.util.StringUtils;
|
||||||
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
|
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
|
import java.net.URI;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
|
||||||
|
@@ -363,12 +364,12 @@ public class TestLocalFileSystem {
 
     FileStatus status = fileSys.getFileStatus(path);
     assertTrue("check we're actually changing something", newModTime != status.getModificationTime());
-    assertEquals(0, status.getAccessTime());
+    long accessTime = status.getAccessTime();
 
     fileSys.setTimes(path, newModTime, -1);
     status = fileSys.getFileStatus(path);
     assertEquals(newModTime, status.getModificationTime());
-    assertEquals(0, status.getAccessTime());
+    assertEquals(accessTime, status.getAccessTime());
   }
 
   /**
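The rewritten assertions above rely on FileSystem.setTimes treating -1 as "leave this timestamp unchanged", so the test now captures the pre-existing access time instead of assuming it is 0. A minimal sketch of that contract, not taken from this patch (the file path is a placeholder and must already exist):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative only: bump the modification time of an existing file while
// leaving its access time untouched by passing -1 for atime.
public class TouchMtimeOnly {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path p = new Path("/tmp/touch-demo.txt");                 // placeholder; must exist
    FileStatus before = fs.getFileStatus(p);
    fs.setTimes(p, before.getModificationTime() + 1000, -1);  // -1 keeps atime as-is
    FileStatus after = fs.getFileStatus(p);
    System.out.println(after.getAccessTime() == before.getAccessTime());  // true
  }
}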
@@ -520,4 +521,18 @@ public class TestLocalFileSystem {
       fail(s);
     }
   }
+
+  @Test
+  public void testStripFragmentFromPath() throws Exception {
+    FileSystem fs = FileSystem.getLocal(new Configuration());
+    Path pathQualified = TEST_PATH.makeQualified(fs.getUri(),
+        fs.getWorkingDirectory());
+    Path pathWithFragment = new Path(
+        new URI(pathQualified.toString() + "#glacier"));
+    // Create test file with fragment
+    FileSystemTestHelper.createFile(fs, pathWithFragment);
+    Path resolved = fs.resolvePath(pathWithFragment);
+    assertEquals("resolvePath did not strip fragment from Path", pathQualified,
+        resolved);
+  }
 }
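The new testStripFragmentFromPath exercises Path objects built from URIs that carry a "#fragment". A hedged standalone sketch of the same API calls (the local file is hypothetical and must exist before resolvePath is called):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative only: a Path built from a URI can carry a "#fragment";
// resolvePath() returns the fully qualified Path without it.
public class FragmentDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path qualified = new Path("/tmp/data.txt")            // placeholder; must exist
        .makeQualified(fs.getUri(), fs.getWorkingDirectory());
    Path withFragment = new Path(new URI(qualified.toString() + "#glacier"));
    System.out.println(fs.resolvePath(withFragment));     // prints the path, fragment stripped
  }
}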
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.StringReader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestStat {
+
+  private static Stat stat;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    stat = new Stat(new Path("/dummypath"),
+        4096l, false, FileSystem.get(new Configuration()));
+  }
+
+  private class StatOutput {
+    final String doesNotExist;
+    final String directory;
+    final String file;
+    final String symlink;
+    final String stickydir;
+
+    StatOutput(String doesNotExist, String directory, String file,
+        String symlink, String stickydir) {
+      this.doesNotExist = doesNotExist;
+      this.directory = directory;
+      this.file = file;
+      this.symlink = symlink;
+      this.stickydir = stickydir;
+    }
+
+    void test() throws Exception {
+      BufferedReader br;
+      FileStatus status;
+
+      try {
+        br = new BufferedReader(new StringReader(doesNotExist));
+        stat.parseExecResult(br);
+      } catch (FileNotFoundException e) {
+        // expected
+      }
+
+      br = new BufferedReader(new StringReader(directory));
+      stat.parseExecResult(br);
+      status = stat.getFileStatusForTesting();
+      assertTrue(status.isDirectory());
+
+      br = new BufferedReader(new StringReader(file));
+      stat.parseExecResult(br);
+      status = stat.getFileStatusForTesting();
+      assertTrue(status.isFile());
+
+      br = new BufferedReader(new StringReader(symlink));
+      stat.parseExecResult(br);
+      status = stat.getFileStatusForTesting();
+      assertTrue(status.isSymlink());
+
+      br = new BufferedReader(new StringReader(stickydir));
+      stat.parseExecResult(br);
+      status = stat.getFileStatusForTesting();
+      assertTrue(status.isDirectory());
+      assertTrue(status.getPermission().getStickyBit());
+    }
+  }
+
+  @Test(timeout=10000)
+  public void testStatLinux() throws Exception {
+    StatOutput linux = new StatOutput(
+        "stat: cannot stat `watermelon': No such file or directory",
+        "4096,directory,1373584236,1373586485,755,andrew,root,`.'",
+        "0,regular empty file,1373584228,1373584228,644,andrew,andrew,`target'",
+        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,`link' -> `target'",
+        "4096,directory,1374622334,1375124212,1755,andrew,andrew,`stickydir'");
+    linux.test();
+  }
+
+  @Test(timeout=10000)
+  public void testStatFreeBSD() throws Exception {
+    StatOutput freebsd = new StatOutput(
+        "stat: symtest/link: stat: No such file or directory",
+        "512,Directory,1373583695,1373583669,40755,awang,awang,`link' -> `'",
+        "0,Regular File,1373508937,1373508937,100644,awang,awang,`link' -> `'",
+        "6,Symbolic Link,1373508941,1373508941,120755,awang,awang,`link' -> `target'",
+        "512,Directory,1375139537,1375139537,41755,awang,awang,`link' -> `'");
+    freebsd.test();
+  }
+
+  @Test(timeout=10000)
+  public void testStatFileNotFound() throws Exception {
+    try {
+      stat.getFileStatus();
+      fail("Expected FileNotFoundException");
+    } catch (FileNotFoundException e) {
+      // expected
+    }
+  }
+}
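The canned strings handed to StatOutput above are comma-separated stat(1) output: size, file type, two epoch timestamps, octal mode, owner, group, and a quoted name. A small illustrative parser for one such line, not the Hadoop Stat implementation (the timestamp order is assumed):

// Illustrative only, not the Hadoop Stat parser: split one of the canned lines
// used above into its fields.
public class StatLineDemo {
  public static void main(String[] args) {
    String line = "4096,directory,1373584236,1373586485,755,andrew,root,`.'";
    String[] f = line.split(",", 8);          // limit 8 keeps the quoted name intact
    long length = Long.parseLong(f[0]);
    boolean isDir = "directory".equalsIgnoreCase(f[1]);
    long time1 = Long.parseLong(f[2]);        // epoch seconds; mtime/atime order assumed
    long time2 = Long.parseLong(f[3]);
    String mode = f[4], owner = f[5], group = f[6], name = f[7];
    System.out.println(length + " dir=" + isDir + " " + owner + ":" + group + " " + name);
  }
}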
@@ -31,6 +31,7 @@ import java.net.URISyntaxException;
 
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
 
 /**
@@ -134,6 +135,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
     Path fileAbs = new Path(testBaseDir1()+"/file");
     Path fileQual = new Path(testURI().toString(), fileAbs);
     Path link = new Path(testBaseDir1()+"/linkToFile");
+    Path linkQual = new Path(testURI().toString(), link.toString());
     wrapper.createSymlink(fileAbs, link, false);
     // Deleting the link using FileContext currently fails because
     // resolve looks up LocalFs rather than RawLocalFs for the path
@@ -151,18 +153,15 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
       // Expected. File's exists method returns false for dangling links
     }
     // We can stat a dangling link
+    UserGroupInformation user = UserGroupInformation.getCurrentUser();
     FileStatus fsd = wrapper.getFileLinkStatus(link);
     assertEquals(fileQual, fsd.getSymlink());
     assertTrue(fsd.isSymlink());
     assertFalse(fsd.isDirectory());
-    assertEquals("", fsd.getOwner());
-    assertEquals("", fsd.getGroup());
-    assertEquals(link, fsd.getPath());
-    assertEquals(0, fsd.getLen());
-    assertEquals(0, fsd.getBlockSize());
-    assertEquals(0, fsd.getReplication());
-    assertEquals(0, fsd.getAccessTime());
-    assertEquals(FsPermission.getDefault(), fsd.getPermission());
+    assertEquals(user.getUserName(), fsd.getOwner());
+    // Compare against user's primary group
+    assertEquals(user.getGroupNames()[0], fsd.getGroup());
+    assertEquals(linkQual, fsd.getPath());
     // Accessing the link
     try {
       readFile(link);
@@ -116,7 +116,8 @@ public class HttpServerFunctionalTest extends Assert {
   public static HttpServer createServer(String host, int port)
       throws IOException {
     prepareTestWebapp();
-    return new HttpServer(TEST, host, port, true);
+    return new HttpServer.Builder().setName(TEST).setBindAddress(host)
+        .setPort(port).setFindPort(true).build();
   }
 
   /**
@@ -126,7 +127,8 @@ public class HttpServerFunctionalTest extends Assert {
    * @throws IOException if it could not be created
    */
   public static HttpServer createServer(String webapp) throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).build();
   }
   /**
    * Create an HttpServer instance for the given webapp
@@ -137,13 +139,16 @@ public class HttpServerFunctionalTest extends Assert {
    */
   public static HttpServer createServer(String webapp, Configuration conf)
       throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).build();
   }
 
   public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
       throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf, adminsAcl);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
   }
 
   /**
    * Create an HttpServer instance for the given webapp
    * @param webapp the webapp to work with
@@ -154,7 +159,8 @@ public class HttpServerFunctionalTest extends Assert {
    */
   public static HttpServer createServer(String webapp, Configuration conf,
       String[] pathSpecs) throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf, pathSpecs);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
   }
 
   /**
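All of these helpers now construct the test server through HttpServer.Builder rather than the multi-argument constructors. A minimal usage sketch assuming only the builder methods that appear in this patch (the webapp name and port are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;

// Minimal sketch of the builder-style construction used above.
public class BuilderDemo {
  public static void main(String[] args) throws Exception {
    HttpServer server = new HttpServer.Builder()
        .setName("test")
        .setBindAddress("0.0.0.0")
        .setPort(0)                  // 0 lets the server pick a free port
        .setFindPort(true)           // probe upward if the requested port is busy
        .setConf(new Configuration())
        .build();
    server.start();
    System.out.println("listening on port " + server.getPort());
    server.stop();
  }
}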
@@ -121,7 +121,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
 
   @SuppressWarnings("serial")
   public static class LongHeaderServlet extends HttpServlet {
-    @SuppressWarnings("unchecked")
     @Override
     public void doGet(HttpServletRequest request,
                       HttpServletResponse response
@@ -362,7 +361,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
     MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
 
-    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).build();
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.start();
     int port = myServer.getPort();
@@ -403,8 +403,9 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
     MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
 
-    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf,
-        new AccessControlList("userA,userB groupC,groupD"));
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).setConf(conf)
+        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.start();
     int port = myServer.getPort();
@@ -42,7 +42,10 @@ public class TestLogLevel extends TestCase {
     log.error("log.error1");
     assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
 
-    HttpServer server = new HttpServer("..", "localhost", 22222, true);
+    HttpServer server = new HttpServer.Builder().setName("..")
+        .setBindAddress("localhost").setPort(22222).setFindPort(true)
+        .build();
+
     server.start();
     int port = server.getPort();
 
@@ -38,6 +38,7 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.junit.Before;
 import org.junit.Test;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier;
@@ -58,7 +59,7 @@ public class TestDoAsEffectiveUser {
       GROUP2_NAME };
   private static final String ADDRESS = "0.0.0.0";
   private TestProtocol proxy;
-  private static Configuration masterConf = new Configuration();
+  private static final Configuration masterConf = new Configuration();
 
 
   public static final Log LOG = LogFactory
@@ -70,6 +71,10 @@ public class TestDoAsEffectiveUser {
         "RULE:[2:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//" +
         "RULE:[1:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
         + "DEFAULT");
+  }
+
+  @Before
+  public void setMasterConf() {
     UserGroupInformation.setConfiguration(masterConf);
   }
 
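Re-applying UserGroupInformation.setConfiguration in an @Before method, as above, resets global UGI state before every test instead of once per class, so one test cannot leak login configuration into the next. A small sketch of that fixture pattern, not taken from this patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Before;
import org.junit.Test;

// Sketch of the fixture pattern: reset global UGI configuration before each
// test method instead of once in a static initializer.
public class UgiFixtureDemo {
  private static final Configuration conf = new Configuration();

  @Before
  public void resetUgi() {
    UserGroupInformation.setConfiguration(conf);
  }

  @Test
  public void printsCurrentUser() throws Exception {
    System.out.println(UserGroupInformation.getCurrentUser().getUserName());
  }
}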
@@ -296,7 +296,11 @@
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^( |\t)*modification times, ownership and the mode.( )*</expected-output>
+          <expected-output>^( |\t)*modification times, ownership and the mode. Passing -f( )*</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^( |\t)*overwrites the destination if it already exists.( )*</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -400,7 +404,11 @@
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^( |\t)*ownership and the mode.( )*</expected-output>
+          <expected-output>^( |\t)*ownership and the mode. Passing -f overwrites( )*</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^( |\t)*the destination if it already exists.( )*</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -38,7 +38,7 @@
     <dependency>
       <groupId>org.apache.directory.server</groupId>
       <artifactId>apacheds-all</artifactId>
-      <version>2.0.0-M14</version>
+      <version>2.0.0-M15</version>
       <scope>compile</scope>
     </dependency>
     <dependency>
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.directory.server.kerberos.shared.keytab;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-//This is a hack for ApacheDS 2.0.0-M14 to be able to create
-//keytab files with more than one principal.
-//It needs to be in this package because the KeytabEncoder class is package
-// private.
-//This class can be removed once jira DIRSERVER-1882
-// (https://issues.apache.org/jira/browse/DIRSERVER-1882) solved
-public class HackedKeytab extends Keytab {
-
-  private byte[] keytabVersion = VERSION_52;
-
-  public void write( File file, int principalCount ) throws IOException
-  {
-      HackedKeytabEncoder writer = new HackedKeytabEncoder();
-      ByteBuffer buffer = writer.write( keytabVersion, getEntries(),
-        principalCount );
-      writeFile( buffer, file );
-  }
-
-}
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.directory.server.kerberos.shared.keytab;
-
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
-
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.List;
-
-//This is a hack for ApacheDS 2.0.0-M14 to be able to create
-//keytab files with more than one principal.
-//It needs to be in this package because the KeytabEncoder class is package
-// private.
-//This class can be removed once jira DIRSERVER-1882
-// (https://issues.apache.org/jira/browse/DIRSERVER-1882) solved
-class HackedKeytabEncoder extends KeytabEncoder {
-
-  ByteBuffer write( byte[] keytabVersion, List<KeytabEntry> entries,
-      int principalCount )
-  {
-      ByteBuffer buffer = ByteBuffer.allocate( 512 * principalCount);
-      putKeytabVersion(buffer, keytabVersion);
-      putKeytabEntries( buffer, entries );
-      buffer.flip();
-      return buffer;
-  }
-
-  private void putKeytabVersion( ByteBuffer buffer, byte[] version )
-  {
-      buffer.put( version );
-  }
-
-  private void putKeytabEntries( ByteBuffer buffer, List<KeytabEntry> entries )
-  {
-      Iterator<KeytabEntry> iterator = entries.iterator();
-
-      while ( iterator.hasNext() )
-      {
-          ByteBuffer entryBuffer = putKeytabEntry( iterator.next() );
-          int size = entryBuffer.position();
-
-          entryBuffer.flip();
-
-          buffer.putInt( size );
-          buffer.put( entryBuffer );
-      }
-  }
-
-  private ByteBuffer putKeytabEntry( KeytabEntry entry )
-  {
-      ByteBuffer buffer = ByteBuffer.allocate( 100 );
-
-      putPrincipalName( buffer, entry.getPrincipalName() );
-
-      buffer.putInt( ( int ) entry.getPrincipalType() );
-
-      buffer.putInt( ( int ) ( entry.getTimeStamp().getTime() / 1000 ) );
-
-      buffer.put( entry.getKeyVersion() );
-
-      putKeyBlock( buffer, entry.getKey() );
-
-      return buffer;
-  }
-
-  private void putPrincipalName( ByteBuffer buffer, String principalName )
-  {
-      String[] split = principalName.split("@");
-      String nameComponent = split[0];
-      String realm = split[1];
-
-      String[] nameComponents = nameComponent.split( "/" );
-
-      // increment for v1
-      buffer.putShort( ( short ) nameComponents.length );
-
-      putCountedString( buffer, realm );
-      // write components
-
-      for ( int ii = 0; ii < nameComponents.length; ii++ )
-      {
-          putCountedString( buffer, nameComponents[ii] );
-      }
-  }
-
-  private void putKeyBlock( ByteBuffer buffer, EncryptionKey key )
-  {
-      buffer.putShort( ( short ) key.getKeyType().getValue() );
-      putCountedBytes( buffer, key.getKeyValue() );
-  }
-
-  private void putCountedString( ByteBuffer buffer, String string )
-  {
-      byte[] data = string.getBytes();
-      buffer.putShort( ( short ) data.length );
-      buffer.put( data );
-  }
-
-  private void putCountedBytes( ByteBuffer buffer, byte[] data )
-  {
-      buffer.putShort( ( short ) data.length );
-      buffer.put( data );
-  }
-
-}
@@ -37,7 +37,7 @@ import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
 import org.apache.directory.server.core.partition.ldif.LdifPartition;
 import org.apache.directory.server.kerberos.kdc.KdcServer;
 import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
-import org.apache.directory.server.kerberos.shared.keytab.HackedKeytab;
+import org.apache.directory.server.kerberos.shared.keytab.Keytab;
 import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
 import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 import org.apache.directory.server.protocol.shared.transport.UdpTransport;
@@ -514,7 +514,7 @@ public class MiniKdc {
   public void createPrincipal(File keytabFile, String ... principals)
       throws Exception {
     String generatedPassword = UUID.randomUUID().toString();
-    HackedKeytab keytab = new HackedKeytab();
+    Keytab keytab = new Keytab();
     List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
     for (String principal : principals) {
       createPrincipal(principal, generatedPassword);
@@ -529,6 +529,6 @@ public class MiniKdc {
       }
     }
     keytab.setEntries(entries);
-    keytab.write(keytabFile, principals.length);
+    keytab.write(keytabFile);
   }
 }
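With apacheds 2.0.0-M15 the stock Keytab class can write keytabs containing several principals (the DIRSERVER-1882 fix), so the HackedKeytab workaround is deleted and MiniKdc writes the file directly. A sketch mirroring the calls above; building the KeytabEntry list is elided because it depends on KerberosKeyFactory and principal details:

import java.io.File;
import java.util.List;
import org.apache.directory.server.kerberos.shared.keytab.Keytab;
import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;

// Sketch: write all entries with the stock ApacheDS Keytab class; one write()
// call covers any number of principals.
public class KeytabWriteDemo {
  static void writeKeytab(File keytabFile, List<KeytabEntry> entries) throws Exception {
    Keytab keytab = new Keytab();
    keytab.setEntries(entries);
    keytab.write(keytabFile);
  }
}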
@@ -137,7 +137,7 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
           subject.getPrincipals().iterator().next().getClass());
       Assert.assertEquals(principal + "@" + kdc.getRealm(),
           subject.getPrincipals().iterator().next().getName());
-      loginContext.login();
+      loginContext.logout();
 
       //server login
       subject = new Subject(false, principals, new HashSet<Object>(),
@@ -151,7 +151,7 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
           subject.getPrincipals().iterator().next().getClass());
       Assert.assertEquals(principal + "@" + kdc.getRealm(),
           subject.getPrincipals().iterator().next().getName());
-      loginContext.login();
+      loginContext.logout();
 
     } finally {
       if (loginContext != null) {
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.nfs.nfs3.response;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
 
-import com.google.common.collect.ObjectArrays;
-
 /**
  * READDIR3 Response
  */
@@ -56,12 +58,11 @@ public class READDIR3Response extends NFS3Response {
   }
 
   public static class DirList3 {
-    final Entry3 entries[];
+    final List<Entry3> entries;
     final boolean eof;
 
     public DirList3(Entry3[] entries, boolean eof) {
-      this.entries = ObjectArrays.newArray(entries, entries.length);
-      System.arraycopy(this.entries, 0, entries, 0, entries.length);
+      this.entries = Collections.unmodifiableList(Arrays.asList(entries));
       this.eof = eof;
     }
   }
@@ -102,12 +103,11 @@ public class READDIR3Response extends NFS3Response {
 
     if (getStatus() == Nfs3Status.NFS3_OK) {
       xdr.writeLongAsHyper(cookieVerf);
-      Entry3[] f = dirList.entries;
-      for (int i = 0; i < f.length; i++) {
+      for (Entry3 e : dirList.entries) {
         xdr.writeBoolean(true); // Value follows
-        xdr.writeLongAsHyper(f[i].getFileId());
-        xdr.writeString(f[i].getName());
-        xdr.writeLongAsHyper(f[i].getCookie());
+        xdr.writeLongAsHyper(e.getFileId());
+        xdr.writeString(e.getName());
+        xdr.writeLongAsHyper(e.getCookie());
       }
 
       xdr.writeBoolean(false);
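The old constructor allocated a fresh array and then arraycopy'd from that empty array into the caller's argument, so the response kept no entries and the caller's array was wiped; wrapping the argument in an unmodifiable list fixes the copy and also prevents later mutation. A self-contained sketch of the broken and fixed patterns (plain strings, not NFS code):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Contrast of the broken copy with the fix applied above.
public class EntriesCopyDemo {
  public static void main(String[] args) {
    // Broken pattern: the arraycopy arguments are reversed, so 'stored' stays
    // full of nulls and the caller's array is overwritten with nulls.
    String[] incoming = { "a", "b", "c" };
    String[] stored = new String[incoming.length];
    System.arraycopy(stored, 0, incoming, 0, incoming.length);
    System.out.println(Arrays.toString(stored));     // [null, null, null]
    System.out.println(Arrays.toString(incoming));   // [null, null, null]

    // Fixed pattern: keep a read-only view of the caller's entries.
    String[] entries = { "a", "b", "c" };
    List<String> kept = Collections.unmodifiableList(Arrays.asList(entries));
    System.out.println(kept);                        // [a, b, c]
  }
}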
@@ -17,13 +17,15 @@
  */
 package org.apache.hadoop.nfs.nfs3.response;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
 
-import com.google.common.collect.ObjectArrays;
-
 /**
  * READDIRPLUS3 Response
  */
@@ -60,16 +62,15 @@ public class READDIRPLUS3Response extends NFS3Response {
   }
 
   public static class DirListPlus3 {
-    EntryPlus3 entries[];
+    List<EntryPlus3> entries;
     boolean eof;
 
     public DirListPlus3(EntryPlus3[] entries, boolean eof) {
-      this.entries = ObjectArrays.newArray(entries, entries.length);
-      System.arraycopy(this.entries, 0, entries, 0, entries.length);
+      this.entries = Collections.unmodifiableList(Arrays.asList(entries));
      this.eof = eof;
     }
 
-    EntryPlus3[] getEntries() {
+    List<EntryPlus3> getEntries() {
       return entries;
     }
 
@@ -101,10 +102,9 @@ public class READDIRPLUS3Response extends NFS3Response {
 
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeLongAsHyper(cookieVerf);
-      EntryPlus3[] f = dirListPlus.getEntries();
-      for (int i = 0; i < f.length; i++) {
+      for (EntryPlus3 f : dirListPlus.getEntries()) {
         out.writeBoolean(true); // next
-        f[i].seralize(out);
+        f.seralize(out);
       }
 
       out.writeBoolean(false);
@@ -77,7 +77,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
@@ -32,7 +32,6 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
@@ -70,7 +69,7 @@ class OpenFileCtx {
   // The stream write-back status. True means one thread is doing write back.
   private boolean asyncStatus;
 
-  private final FSDataOutputStream fos;
+  private final HdfsDataOutputStream fos;
   private final Nfs3FileAttributes latestAttr;
   private long nextOffset;
 
@@ -114,7 +113,7 @@
     return nonSequentialWriteInMemory;
   }
 
-  OpenFileCtx(FSDataOutputStream fos, Nfs3FileAttributes latestAttr,
+  OpenFileCtx(HdfsDataOutputStream fos, Nfs3FileAttributes latestAttr,
       String dumpFilePath) {
     this.fos = fos;
     this.latestAttr = latestAttr;
@@ -438,7 +437,7 @@
     FSDataInputStream fis = null;
     try {
       // Sync file data and length to avoid partial read failure
-      ((HdfsDataOutputStream) fos).hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
+      fos.hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
 
       fis = new FSDataInputStream(dfsClient.open(path));
       readCount = fis.read(offset, readbuffer, 0, count);
@@ -527,7 +526,7 @@
     int ret = COMMIT_WAIT;
     try {
       // Sync file data and length
-      ((HdfsDataOutputStream) fos).hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
+      fos.hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
       // Nothing to do for metadata since attr related change is pass-through
       ret = COMMIT_FINISHED;
     } catch (IOException e) {
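Typing fos as HdfsDataOutputStream removes the casts around hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH)), which flushes data and also publishes the new file length so a reader opened right afterwards sees the synced bytes. A hedged sketch of the call pattern; the path is a placeholder and the cast assumes the stream returned by a plain HDFS create() is an HdfsDataOutputStream:

import java.util.EnumSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;

// Sketch: hsync with UPDATE_LENGTH so both the data and the visible file
// length are persisted before another client re-opens the file.
public class HsyncDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());   // assumes fs.defaultFS points at HDFS
    HdfsDataOutputStream out =
        (HdfsDataOutputStream) fs.create(new Path("/tmp/hsync-demo"));  // cast assumed for HDFS streams
    out.write(new byte[4096]);
    out.hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
    out.close();
  }
}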
@@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.FileUtil;
@@ -629,7 +629,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
       return new CREATE3Response(Nfs3Status.NFS3ERR_INVAL);
     }
 
-    FSDataOutputStream fos = null;
+    HdfsDataOutputStream fos = null;
     String dirFileIdPath = Nfs3Utils.getFileIdPath(dirHandle);
     WccAttr preOpDirAttr = null;
     Nfs3FileAttributes postOpObjAttr = null;
@@ -652,7 +652,8 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
       EnumSet<CreateFlag> flag = (createMode != Nfs3Constant.CREATE_EXCLUSIVE) ? EnumSet
           .of(CreateFlag.CREATE, CreateFlag.OVERWRITE) : EnumSet
           .of(CreateFlag.CREATE);
-      fos = new FSDataOutputStream(dfsClient.create(fileIdPath, permission,
+
+      fos = new HdfsDataOutputStream(dfsClient.create(fileIdPath, permission,
           flag, false, replication, blockSize, null, bufferSize, null),
           statistics);
 
@@ -6,6 +6,9 @@ Trunk (Unreleased)
 
     HDFS-3034. Remove the deprecated DFSOutputStream.sync() method.  (szetszwo)
 
+    HDFS-5079. Cleaning up NNHAStatusHeartbeat.State from
+    DatanodeProtocolProtos. (Tao Luo via shv)
+
   NEW FEATURES
 
     HDFS-3125. Add JournalService to enable Journal Daemon. (suresh)
@@ -117,9 +120,6 @@ Trunk (Unreleased)
 
     HDFS-4904. Remove JournalService. (Arpit Agarwal via cnauroth)
 
-    HDFS-5004. Add additional JMX bean for NameNode status data
-    (Trevor Lorimer via cos)
-
   OPTIMIZATIONS
 
   BUG FIXES
@@ -256,6 +256,12 @@ Release 2.3.0 - UNRELEASED
     HDFS-4817. Make HDFS advisory caching configurable on a per-file basis.
     (Colin Patrick McCabe)
 
+    HDFS-5004. Add additional JMX bean for NameNode status data
+    (Trevor Lorimer via cos)
+
+    HDFS-5068. Convert NNThroughputBenchmark to a Tool to allow generic options.
+    (shv)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -267,6 +273,9 @@ Release 2.3.0 - UNRELEASED
 
     HDFS-5065. TestSymlinkHdfsDisable fails on Windows. (ivanmi)
 
+    HDFS-4816. transitionToActive blocks if the SBN is doing checkpoint image
+    transfer. (Andrew Wang)
+
 Release 2.1.1-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -279,6 +288,17 @@ Release 2.1.1-beta - UNRELEASED
 
     HDFS-4763 Add script changes/utility for starting NFS gateway (brandonli)
 
+    HDFS-5076 Add MXBean methods to query NN's transaction information and
+    JournalNode's journal status. (jing9)
+
+    HDFS-5104 Support dotdot name in NFS LOOKUP operation (brandonli)
+
+    HDFS-5107 Fix array copy error in Readdir and Readdirplus responses
+    (brandonli)
+
+    HDFS-5110 Change FSDataOutputStream to HdfsDataOutputStream for opened
+    streams to fix type cast error. (brandonli)
+
   IMPROVEMENTS
 
     HDFS-4513. Clarify in the WebHDFS REST API that all JSON respsonses may
@@ -296,6 +316,12 @@ Release 2.1.1-beta - UNRELEASED
     HDFS-5047. Supress logging of full stack trace of quota and lease
     exceptions. (Robert Parker via kihwal)
 
+    HDFS-2933. Improve DataNode Web UI Index Page. (Vivek Ganesan via
+    Arpit Agarwal)
+
+    HDFS-5111. Remove duplicated error message for snapshot commands when
+    processing invalid arguments. (jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -312,7 +338,44 @@ Release 2.1.1-beta - UNRELEASED
     HDFS-5091. Support for spnego keytab separate from the JournalNode keytab
     for secure HA. (jing9)
 
-Release 2.1.0-beta - 2013-08-06
+    HDFS-5051. nn fails to download checkpointed image from snn in some
+    setups. (Vinay and suresh via suresh)
+
+    HDFS-4898. BlockPlacementPolicyWithNodeGroup.chooseRemoteRack() fails to
+    properly fallback to local rack. (szetszwo)
+
+    HDFS-4632. globStatus using backslash for escaping does not work on Windows.
+    (Chuan Liu via cnauroth)
+
+    HDFS-5093. TestGlobPaths should re-use the MiniDFSCluster to avoid failure
+    on Windows. (Chuan Liu via cnauroth)
+
+    HDFS-5080. BootstrapStandby not working with QJM when the existing NN is
+    active. (jing9)
+
+    HDFS-5099. Namenode#copyEditLogSegmentsToSharedDir should close
+    EditLogInputStreams upon finishing. (Chuan Liu via cnauroth)
+
+    HDFS-2994. If lease soft limit is recovered successfully
+    the append can fail. (Tao Luo via shv)
+
+    HDFS-5100. TestNamenodeRetryCache fails on Windows due to incorrect cleanup.
+    (Chuan Liu via cnauroth)
+
+    HDFS-5103. TestDirectoryScanner fails on Windows. (Chuan Liu via cnauroth)
+
+    HDFS-5102. Snapshot names should not be allowed to contain slash characters.
+    (jing9)
+
+    HDFS-5105. TestFsck fails on Windows. (Chuan Liu via arp)
+
+    HDFS-5106. TestDatanodeBlockScanner fails on Windows due to incorrect path
+    format. (Chuan Liu via cnauroth)
+
+    HDFS-4594. WebHDFS open sets Content-Length header to what is specified by
+    length parameter rather than how much data is actually returned. (cnauroth)
+
+Release 2.1.0-beta - 2013-08-22
 
   INCOMPATIBLE CHANGES
 
@@ -63,7 +63,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
@@ -417,7 +416,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <protocVersion>2.5.0</protocVersion>
+                  <protocVersion>${protobuf.version}</protocVersion>
+                  <protocCommand>${protoc.path}</protocCommand>
                   <imports>
                     <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
                     <param>${basedir}/src/main/proto</param>
@@ -442,7 +442,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <protocVersion>2.5.0</protocVersion>
+                  <protocVersion>${protobuf.version}</protocVersion>
+                  <protocCommand>${protoc.path}</protocCommand>
                   <imports>
                     <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
                     <param>${basedir}/src/main/proto</param>
@@ -464,7 +465,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <protocVersion>2.5.0</protocVersion>
+                  <protocVersion>${protobuf.version}</protocVersion>
+                  <protocCommand>${protoc.path}</protocCommand>
                   <imports>
                     <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
                     <param>${basedir}/src/main/proto</param>
@@ -486,7 +488,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <protocVersion>2.5.0</protocVersion>
+                  <protocVersion>${protobuf.version}</protocVersion>
+                  <protocCommand>${protoc.path}</protocCommand>
                   <imports>
                     <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
                     <param>${basedir}/src/main/proto</param>
@@ -103,7 +103,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <goal>protoc</goal>
                 </goals>
                 <configuration>
-                  <protocVersion>2.5.0</protocVersion>
+                  <protocVersion>${protobuf.version}</protocVersion>
+                  <protocCommand>${protoc.path}</protocCommand>
                   <imports>
                     <param>${basedir}/../../../../../hadoop-common-project/hadoop-common/src/main/proto</param>
                     <param>${basedir}/../../../../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto</param>
@@ -70,6 +70,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB;
+import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
@@ -204,13 +205,20 @@ public class DFSUtil {
     String[] components = StringUtils.split(src, '/');
     for (int i = 0; i < components.length; i++) {
       String element = components[i];
-      if (element.equals("..") ||
-          element.equals(".")  ||
+      if (element.equals(".")  ||
          (element.indexOf(":") >= 0)  ||
          (element.indexOf("/") >= 0)) {
         return false;
       }
+      // ".." is allowed in path starting with /.reserved/.inodes
+      if (element.equals("..")) {
+        if (components.length > 4
+            && components[1].equals(FSDirectory.DOT_RESERVED_STRING)
+            && components[2].equals(FSDirectory.DOT_INODES_STRING)) {
+          continue;
+        }
+        return false;
+      }
       // The string may start or end with a /, but not have
       // "//" in the middle.
       if (element.isEmpty() && i != components.length - 1 &&
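The relaxed check still rejects ".." in ordinary paths but lets it through when the path begins with the reserved inodes prefix, since /.reserved/.inodes/<id>/.. is how the parent of an inode-addressed file is named. A standalone sketch of just that rule; the ".reserved" and ".inodes" literals stand in for FSDirectory.DOT_RESERVED_STRING and DOT_INODES_STRING:

// Standalone sketch of the relaxed ".." rule applied above.
public class ReservedDotDotDemo {
  static boolean dotDotAllowed(String src) {
    String[] c = src.split("/");              // leading '/' yields an empty first component
    for (int i = 0; i < c.length; i++) {
      if (c[i].equals("..")) {
        boolean reservedInodes = c.length > 4
            && c[1].equals(".reserved") && c[2].equals(".inodes");
        if (!reservedInodes) {
          return false;
        }
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(dotDotAllowed("/.reserved/.inodes/16386/.."));  // true
    System.out.println(dotDotAllowed("/a/../b"));                      // false
  }
}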
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
+import org.apache.hadoop.ha.proto.HAServiceProtocolProtos;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -1311,10 +1312,10 @@ public class PBHelper {
         NNHAStatusHeartbeatProto.newBuilder();
     switch (hb.getState()) {
       case ACTIVE:
-        builder.setState(NNHAStatusHeartbeatProto.State.ACTIVE);
+        builder.setState(HAServiceProtocolProtos.HAServiceStateProto.ACTIVE);
         break;
       case STANDBY:
-        builder.setState(NNHAStatusHeartbeatProto.State.STANDBY);
+        builder.setState(HAServiceProtocolProtos.HAServiceStateProto.STANDBY);
         break;
       default:
         throw new IllegalArgumentException("Unexpected NNHAStatusHeartbeat.State:" +
@@ -109,7 +109,7 @@ interface AsyncLogger {
    * Fetch the list of edit logs available on the remote node.
    */
   public ListenableFuture<RemoteEditLogManifest> getEditLogManifest(
-      long fromTxnId, boolean forReading);
+      long fromTxnId, boolean forReading, boolean inProgressOk);
 
   /**
    * Prepare recovery. See the HDFS-3077 design document for details.
@@ -262,14 +262,14 @@ class AsyncLoggerSet {
     return QuorumCall.create(calls);
   }
 
-  public QuorumCall<AsyncLogger, RemoteEditLogManifest>
-      getEditLogManifest(long fromTxnId, boolean forReading) {
+  public QuorumCall<AsyncLogger, RemoteEditLogManifest> getEditLogManifest(
+      long fromTxnId, boolean forReading, boolean inProgressOk) {
     Map<AsyncLogger,
         ListenableFuture<RemoteEditLogManifest>> calls
         = Maps.newHashMap();
     for (AsyncLogger logger : loggers) {
       ListenableFuture<RemoteEditLogManifest> future =
-          logger.getEditLogManifest(fromTxnId, forReading);
+          logger.getEditLogManifest(fromTxnId, forReading, inProgressOk);
       calls.put(logger, future);
     }
     return QuorumCall.create(calls);
@@ -519,12 +519,13 @@ public class IPCLoggerChannel implements AsyncLogger {
 
   @Override
   public ListenableFuture<RemoteEditLogManifest> getEditLogManifest(
-      final long fromTxnId, final boolean forReading) {
+      final long fromTxnId, final boolean forReading,
+      final boolean inProgressOk) {
     return executor.submit(new Callable<RemoteEditLogManifest>() {
       @Override
       public RemoteEditLogManifest call() throws IOException {
         GetEditLogManifestResponseProto ret = getProxy().getEditLogManifest(
-            journalId, fromTxnId, forReading);
+            journalId, fromTxnId, forReading, inProgressOk);
         // Update the http port, since we need this to build URLs to any of the
         // returned logs.
         httpPort = ret.getHttpPort();
@@ -456,7 +456,7 @@ public class QuorumJournalManager implements JournalManager {
       long fromTxnId, boolean inProgressOk, boolean forReading) throws IOException {
 
     QuorumCall<AsyncLogger, RemoteEditLogManifest> q =
-        loggers.getEditLogManifest(fromTxnId, forReading);
+        loggers.getEditLogManifest(fromTxnId, forReading, inProgressOk);
     Map<AsyncLogger, RemoteEditLogManifest> resps =
         loggers.waitForWriteQuorum(q, selectInputStreamsTimeoutMs,
             "selectInputStreams");
@@ -480,8 +480,7 @@ public class QuorumJournalManager implements JournalManager {
         allStreams.add(elis);
       }
     }
-    JournalSet.chainAndMakeRedundantStreams(
-        streams, allStreams, fromTxnId, inProgressOk);
+    JournalSet.chainAndMakeRedundantStreams(streams, allStreams, fromTxnId);
   }
 
   @Override
@ -125,10 +125,13 @@ public interface QJournalProtocol {
|
||||||
* @param sinceTxId the first transaction which the client cares about
|
* @param sinceTxId the first transaction which the client cares about
|
||||||
* @param forReading whether or not the caller intends to read from the edit
|
* @param forReading whether or not the caller intends to read from the edit
|
||||||
* logs
|
* logs
|
||||||
|
* @param inProgressOk whether or not to check the in-progress edit log
|
||||||
|
* segment
|
||||||
* @return a list of edit log segments since the given transaction ID.
|
* @return a list of edit log segments since the given transaction ID.
|
||||||
*/
|
*/
|
||||||
public GetEditLogManifestResponseProto getEditLogManifest(
|
public GetEditLogManifestResponseProto getEditLogManifest(String jid,
|
||||||
String jid, long sinceTxId, boolean forReading) throws IOException;
|
long sinceTxId, boolean forReading, boolean inProgressOk)
|
||||||
|
throws IOException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Begin the recovery process for a given segment. See the HDFS-3077
|
* Begin the recovery process for a given segment. See the HDFS-3077
|
||||||
|
|
|
@ -203,7 +203,8 @@ public class QJournalProtocolServerSideTranslatorPB implements QJournalProtocolP
|
||||||
return impl.getEditLogManifest(
|
return impl.getEditLogManifest(
|
||||||
request.getJid().getIdentifier(),
|
request.getJid().getIdentifier(),
|
||||||
request.getSinceTxId(),
|
request.getSinceTxId(),
|
||||||
request.getForReading());
|
request.getForReading(),
|
||||||
|
request.getInProgressOk());
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new ServiceException(e);
|
throw new ServiceException(e);
|
||||||
}
|
}
|
||||||
|
|
|
@ -228,13 +228,15 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public GetEditLogManifestResponseProto getEditLogManifest(String jid,
|
public GetEditLogManifestResponseProto getEditLogManifest(String jid,
|
||||||
long sinceTxId, boolean forReading) throws IOException {
|
long sinceTxId, boolean forReading, boolean inProgressOk)
|
||||||
|
throws IOException {
|
||||||
try {
|
try {
|
||||||
return rpcProxy.getEditLogManifest(NULL_CONTROLLER,
|
return rpcProxy.getEditLogManifest(NULL_CONTROLLER,
|
||||||
GetEditLogManifestRequestProto.newBuilder()
|
GetEditLogManifestRequestProto.newBuilder()
|
||||||
.setJid(convertJournalId(jid))
|
.setJid(convertJournalId(jid))
|
||||||
.setSinceTxId(sinceTxId)
|
.setSinceTxId(sinceTxId)
|
||||||
.setForReading(forReading)
|
.setForReading(forReading)
|
||||||
|
.setInProgressOk(inProgressOk)
|
||||||
.build());
|
.build());
|
||||||
} catch (ServiceException e) {
|
} catch (ServiceException e) {
|
||||||
throw ProtobufHelper.getRemoteException(e);
|
throw ProtobufHelper.getRemoteException(e);
|
||||||
|
|
|
@ -25,10 +25,9 @@ import java.io.InputStream;
|
||||||
import java.io.OutputStreamWriter;
|
import java.io.OutputStreamWriter;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.security.PrivilegedExceptionAction;
|
import java.security.PrivilegedExceptionAction;
|
||||||
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
import java.util.regex.Matcher;
|
|
||||||
import java.util.regex.Pattern;
|
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
@ -36,8 +35,8 @@ import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FileUtil;
|
import org.apache.hadoop.fs.FileUtil;
|
||||||
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
|
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.JournalNotFormattedException;
|
import org.apache.hadoop.hdfs.qjournal.protocol.JournalNotFormattedException;
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
|
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.JournalOutOfSyncException;
|
import org.apache.hadoop.hdfs.qjournal.protocol.JournalOutOfSyncException;
|
||||||
|
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
|
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PersistedRecoveryPaxosData;
|
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PersistedRecoveryPaxosData;
|
||||||
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
|
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
|
||||||
|
@ -50,6 +49,7 @@ import org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile;
|
||||||
import org.apache.hadoop.hdfs.server.namenode.JournalManager;
|
import org.apache.hadoop.hdfs.server.namenode.JournalManager;
|
||||||
import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
|
import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
|
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
|
||||||
|
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
|
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
|
||||||
import org.apache.hadoop.hdfs.util.AtomicFileOutputStream;
|
import org.apache.hadoop.hdfs.util.AtomicFileOutputStream;
|
||||||
import org.apache.hadoop.hdfs.util.BestEffortLongFile;
|
import org.apache.hadoop.hdfs.util.BestEffortLongFile;
|
||||||
|
@ -630,14 +630,31 @@ class Journal implements Closeable {
|
||||||
* @see QJournalProtocol#getEditLogManifest(String, long)
|
* @see QJournalProtocol#getEditLogManifest(String, long)
|
||||||
*/
|
*/
|
||||||
public RemoteEditLogManifest getEditLogManifest(long sinceTxId,
|
public RemoteEditLogManifest getEditLogManifest(long sinceTxId,
|
||||||
boolean forReading) throws IOException {
|
boolean forReading, boolean inProgressOk) throws IOException {
|
||||||
// No need to checkRequest() here - anyone may ask for the list
|
// No need to checkRequest() here - anyone may ask for the list
|
||||||
// of segments.
|
// of segments.
|
||||||
checkFormatted();
|
checkFormatted();
|
||||||
|
|
||||||
RemoteEditLogManifest manifest = new RemoteEditLogManifest(
|
// if this is for reading, ignore the in-progress editlog segment
|
||||||
fjm.getRemoteEditLogs(sinceTxId, forReading));
|
inProgressOk = forReading ? false : inProgressOk;
|
||||||
return manifest;
|
List<RemoteEditLog> logs = fjm.getRemoteEditLogs(sinceTxId, forReading,
|
||||||
|
inProgressOk);
|
||||||
|
|
||||||
|
if (inProgressOk) {
|
||||||
|
RemoteEditLog log = null;
|
||||||
|
for (Iterator<RemoteEditLog> iter = logs.iterator(); iter.hasNext();) {
|
||||||
|
log = iter.next();
|
||||||
|
if (log.isInProgress()) {
|
||||||
|
iter.remove();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (log != null && log.isInProgress()) {
|
||||||
|
logs.add(new RemoteEditLog(log.getStartTxId(), getHighestWrittenTxId()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return new RemoteEditLogManifest(logs);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -18,8 +18,10 @@
|
||||||
package org.apache.hadoop.hdfs.qjournal.server;
|
package org.apache.hadoop.hdfs.qjournal.server;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
|
import java.io.FileFilter;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.net.InetSocketAddress;
|
import java.net.InetSocketAddress;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
|
@ -34,11 +36,13 @@ import org.apache.hadoop.hdfs.server.common.StorageErrorReporter;
|
||||||
import org.apache.hadoop.io.IOUtils;
|
import org.apache.hadoop.io.IOUtils;
|
||||||
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
||||||
import org.apache.hadoop.metrics2.source.JvmMetrics;
|
import org.apache.hadoop.metrics2.source.JvmMetrics;
|
||||||
|
import org.apache.hadoop.metrics2.util.MBeans;
|
||||||
import org.apache.hadoop.security.SecurityUtil;
|
import org.apache.hadoop.security.SecurityUtil;
|
||||||
import org.apache.hadoop.util.DiskChecker;
|
import org.apache.hadoop.util.DiskChecker;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.apache.hadoop.util.Tool;
|
import org.apache.hadoop.util.Tool;
|
||||||
import org.apache.hadoop.util.ToolRunner;
|
import org.apache.hadoop.util.ToolRunner;
|
||||||
|
import org.mortbay.util.ajax.JSON;
|
||||||
|
|
||||||
import com.google.common.base.Preconditions;
|
import com.google.common.base.Preconditions;
|
||||||
import com.google.common.collect.Maps;
|
import com.google.common.collect.Maps;
|
||||||
|
@ -51,7 +55,7 @@ import com.google.common.collect.Maps;
|
||||||
* in the quorum protocol.
|
* in the quorum protocol.
|
||||||
*/
|
*/
|
||||||
@InterfaceAudience.Private
|
@InterfaceAudience.Private
|
||||||
public class JournalNode implements Tool, Configurable {
|
public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
|
||||||
public static final Log LOG = LogFactory.getLog(JournalNode.class);
|
public static final Log LOG = LogFactory.getLog(JournalNode.class);
|
||||||
private Configuration conf;
|
private Configuration conf;
|
||||||
private JournalNodeRpcServer rpcServer;
|
private JournalNodeRpcServer rpcServer;
|
||||||
|
@ -128,6 +132,8 @@ public class JournalNode implements Tool, Configurable {
|
||||||
SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
|
SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
|
||||||
DFSConfigKeys.DFS_JOURNALNODE_USER_NAME_KEY, socAddr.getHostName());
|
DFSConfigKeys.DFS_JOURNALNODE_USER_NAME_KEY, socAddr.getHostName());
|
||||||
|
|
||||||
|
registerJNMXBean();
|
||||||
|
|
||||||
httpServer = new JournalNodeHttpServer(conf, this);
|
httpServer = new JournalNodeHttpServer(conf, this);
|
||||||
httpServer.start();
|
httpServer.start();
|
||||||
|
|
||||||
|
@ -208,6 +214,50 @@ public class JournalNode implements Tool, Configurable {
|
||||||
return new File(new File(dir), jid);
|
return new File(new File(dir), jid);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override // JournalNodeMXBean
|
||||||
|
public String getJournalsStatus() {
|
||||||
|
// jid:{Formatted:True/False}
|
||||||
|
Map<String, Map<String, String>> status =
|
||||||
|
new HashMap<String, Map<String, String>>();
|
||||||
|
synchronized (this) {
|
||||||
|
for (Map.Entry<String, Journal> entry : journalsById.entrySet()) {
|
||||||
|
Map<String, String> jMap = new HashMap<String, String>();
|
||||||
|
jMap.put("Formatted", Boolean.toString(entry.getValue().isFormatted()));
|
||||||
|
status.put(entry.getKey(), jMap);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// It is possible that some journals have been formatted before, while the
|
||||||
|
// corresponding journals are not in journalsById yet (because of restarting
|
||||||
|
// JN, e.g.). For simplicity, let's just assume a journal is formatted if
|
||||||
|
// there is a directory for it. We can also call analyzeStorage method for
|
||||||
|
// these directories if necessary.
|
||||||
|
// Also note that we do not need to check localDir here since
|
||||||
|
// validateAndCreateJournalDir has been called before we register the
|
||||||
|
// MXBean.
|
||||||
|
File[] journalDirs = localDir.listFiles(new FileFilter() {
|
||||||
|
@Override
|
||||||
|
public boolean accept(File file) {
|
||||||
|
return file.isDirectory();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
for (File journalDir : journalDirs) {
|
||||||
|
String jid = journalDir.getName();
|
||||||
|
if (!status.containsKey(jid)) {
|
||||||
|
Map<String, String> jMap = new HashMap<String, String>();
|
||||||
|
jMap.put("Formatted", "true");
|
||||||
|
status.put(jid, jMap);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return JSON.toString(status);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register JournalNodeMXBean
|
||||||
|
*/
|
||||||
|
private void registerJNMXBean() {
|
||||||
|
MBeans.register("JournalNode", "JournalNodeInfo", this);
|
||||||
|
}
|
||||||
|
|
||||||
private class ErrorReporter implements StorageErrorReporter {
|
private class ErrorReporter implements StorageErrorReporter {
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -36,8 +36,8 @@ import org.apache.hadoop.hdfs.server.common.JspHelper;
|
||||||
import org.apache.hadoop.http.HttpServer;
|
import org.apache.hadoop.http.HttpServer;
|
||||||
import org.apache.hadoop.net.NetUtils;
|
import org.apache.hadoop.net.NetUtils;
|
||||||
import org.apache.hadoop.security.authorize.AccessControlList;
|
import org.apache.hadoop.security.authorize.AccessControlList;
|
||||||
import org.apache.hadoop.security.UserGroupInformation;
|
|
||||||
import org.apache.hadoop.security.SecurityUtil;
|
import org.apache.hadoop.security.SecurityUtil;
|
||||||
|
import org.apache.hadoop.security.UserGroupInformation;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encapsulates the HTTP server started by the Journal Service.
|
* Encapsulates the HTTP server started by the Journal Service.
|
||||||
|
@ -69,16 +69,15 @@ public class JournalNodeHttpServer {
|
||||||
bindAddr.getHostName()));
|
bindAddr.getHostName()));
|
||||||
|
|
||||||
int tmpInfoPort = bindAddr.getPort();
|
int tmpInfoPort = bindAddr.getPort();
|
||||||
httpServer = new HttpServer("journal", bindAddr.getHostName(),
|
httpServer = new HttpServer.Builder().setName("journal")
|
||||||
tmpInfoPort, tmpInfoPort == 0, conf, new AccessControlList(conf
|
.setBindAddress(bindAddr.getHostName()).setPort(tmpInfoPort)
|
||||||
.get(DFS_ADMIN, " "))) {
|
.setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
|
||||||
{
|
new AccessControlList(conf.get(DFS_ADMIN, " ")))
|
||||||
if (UserGroupInformation.isSecurityEnabled()) {
|
.setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
|
||||||
initSpnego(conf, DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY,
|
.setUsernameConfKey(
|
||||||
DFSUtil.getSpnegoKeytabKey(conf, DFS_JOURNALNODE_KEYTAB_FILE_KEY));
|
DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY)
|
||||||
}
|
.setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
|
||||||
}
|
DFS_JOURNALNODE_KEYTAB_FILE_KEY)).build();
|
||||||
};
|
|
||||||
httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
|
httpServer.setAttribute(JN_ATTRIBUTE_KEY, localJournalNode);
|
||||||
httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
|
httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
|
||||||
httpServer.addInternalServlet("getJournal", "/getJournal",
|
httpServer.addInternalServlet("getJournal", "/getJournal",
|
||||||
|
|
|
@ -0,0 +1,36 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.apache.hadoop.hdfs.qjournal.server;
|
||||||
|
|
||||||
|
import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
|
import org.apache.hadoop.classification.InterfaceStability;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is the JMX management interface for JournalNode information
|
||||||
|
*/
|
||||||
|
@InterfaceAudience.Public
|
||||||
|
@InterfaceStability.Evolving
|
||||||
|
public interface JournalNodeMXBean {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get status information (e.g., whether formatted) of JournalNode's journals.
|
||||||
|
*
|
||||||
|
* @return A string presenting status for each journal
|
||||||
|
*/
|
||||||
|
public String getJournalsStatus();
|
||||||
|
}
|
|
@ -175,10 +175,11 @@ class JournalNodeRpcServer implements QJournalProtocol {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public GetEditLogManifestResponseProto getEditLogManifest(String jid,
|
public GetEditLogManifestResponseProto getEditLogManifest(String jid,
|
||||||
long sinceTxId, boolean forReading) throws IOException {
|
long sinceTxId, boolean forReading, boolean inProgressOk)
|
||||||
|
throws IOException {
|
||||||
|
|
||||||
RemoteEditLogManifest manifest = jn.getOrCreateJournal(jid)
|
RemoteEditLogManifest manifest = jn.getOrCreateJournal(jid)
|
||||||
.getEditLogManifest(sinceTxId, forReading);
|
.getEditLogManifest(sinceTxId, forReading, inProgressOk);
|
||||||
|
|
||||||
return GetEditLogManifestResponseProto.newBuilder()
|
return GetEditLogManifestResponseProto.newBuilder()
|
||||||
.setManifest(PBHelper.convert(manifest))
|
.setManifest(PBHelper.convert(manifest))
|
||||||
|
|
|
@ -81,6 +81,28 @@ public class DelegationTokenSecretManager
|
||||||
return new DelegationTokenIdentifier();
|
return new DelegationTokenIdentifier();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public synchronized byte[] retrievePassword(
|
||||||
|
DelegationTokenIdentifier identifier) throws InvalidToken {
|
||||||
|
try {
|
||||||
|
// this check introduces inconsistency in the authentication to a
|
||||||
|
// HA standby NN. non-token auths are allowed into the namespace which
|
||||||
|
// decides whether to throw a StandbyException. tokens are a bit
|
||||||
|
// different in that a standby may be behind and thus not yet know
|
||||||
|
// of all tokens issued by the active NN. the following check does
|
||||||
|
// not allow ANY token auth, however it should allow known tokens in
|
||||||
|
checkAvailableForRead();
|
||||||
|
} catch (StandbyException se) {
|
||||||
|
// FIXME: this is a hack to get around changing method signatures by
|
||||||
|
// tunneling a non-InvalidToken exception as the cause which the
|
||||||
|
// RPC server will unwrap before returning to the client
|
||||||
|
InvalidToken wrappedStandby = new InvalidToken("StandbyException");
|
||||||
|
wrappedStandby.initCause(se);
|
||||||
|
throw wrappedStandby;
|
||||||
|
}
|
||||||
|
return super.retrievePassword(identifier);
|
||||||
|
}
|
||||||
|
|
||||||
@Override //SecretManager
|
@Override //SecretManager
|
||||||
public void checkAvailableForRead() throws StandbyException {
|
public void checkAvailableForRead() throws StandbyException {
|
||||||
namesystem.checkOperation(OperationCategory.READ);
|
namesystem.checkOperation(OperationCategory.READ);
|
||||||
|
|
|
@ -169,16 +169,17 @@ public class BlockPlacementPolicyWithNodeGroup extends BlockPlacementPolicyDefau
|
||||||
long blocksize, int maxReplicasPerRack, List<DatanodeDescriptor> results,
|
long blocksize, int maxReplicasPerRack, List<DatanodeDescriptor> results,
|
||||||
boolean avoidStaleNodes) throws NotEnoughReplicasException {
|
boolean avoidStaleNodes) throws NotEnoughReplicasException {
|
||||||
int oldNumOfReplicas = results.size();
|
int oldNumOfReplicas = results.size();
|
||||||
// randomly choose one node from remote racks
|
|
||||||
|
final String rackLocation = NetworkTopology.getFirstHalf(
|
||||||
|
localMachine.getNetworkLocation());
|
||||||
try {
|
try {
|
||||||
chooseRandom(
|
// randomly choose from remote racks
|
||||||
numOfReplicas,
|
chooseRandom(numOfReplicas, "~" + rackLocation, excludedNodes, blocksize,
|
||||||
"~" + NetworkTopology.getFirstHalf(localMachine.getNetworkLocation()),
|
maxReplicasPerRack, results, avoidStaleNodes);
|
||||||
excludedNodes, blocksize, maxReplicasPerRack, results,
|
|
||||||
avoidStaleNodes);
|
|
||||||
} catch (NotEnoughReplicasException e) {
|
} catch (NotEnoughReplicasException e) {
|
||||||
|
// fall back to the local rack
|
||||||
chooseRandom(numOfReplicas - (results.size() - oldNumOfReplicas),
|
chooseRandom(numOfReplicas - (results.size() - oldNumOfReplicas),
|
||||||
localMachine.getNetworkLocation(), excludedNodes, blocksize,
|
rackLocation, excludedNodes, blocksize,
|
||||||
maxReplicasPerRack, results, avoidStaleNodes);
|
maxReplicasPerRack, results, avoidStaleNodes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -385,11 +385,15 @@ public class DataNode extends Configured
|
||||||
String infoHost = infoSocAddr.getHostName();
|
String infoHost = infoSocAddr.getHostName();
|
||||||
int tmpInfoPort = infoSocAddr.getPort();
|
int tmpInfoPort = infoSocAddr.getPort();
|
||||||
this.infoServer = (secureResources == null)
|
this.infoServer = (secureResources == null)
|
||||||
? new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
|
? new HttpServer.Builder().setName("datanode")
|
||||||
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")))
|
.setBindAddress(infoHost).setPort(tmpInfoPort)
|
||||||
: new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
|
.setFindPort(tmpInfoPort == 0).setConf(conf)
|
||||||
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
|
.setACL(new AccessControlList(conf.get(DFS_ADMIN, " "))).build()
|
||||||
secureResources.getListener());
|
: new HttpServer.Builder().setName("datanode")
|
||||||
|
.setBindAddress(infoHost).setPort(tmpInfoPort)
|
||||||
|
.setFindPort(tmpInfoPort == 0).setConf(conf)
|
||||||
|
.setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
|
||||||
|
.setConnector(secureResources.getListener()).build();
|
||||||
LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
|
LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
|
||||||
if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
|
if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
|
||||||
boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
|
boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
|
||||||
|
|
|
@ -19,7 +19,9 @@ package org.apache.hadoop.hdfs.server.datanode;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.net.InetAddress;
|
||||||
import java.net.InetSocketAddress;
|
import java.net.InetSocketAddress;
|
||||||
|
import java.net.URI;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.net.URLEncoder;
|
import java.net.URLEncoder;
|
||||||
import java.security.PrivilegedExceptionAction;
|
import java.security.PrivilegedExceptionAction;
|
||||||
|
@ -27,6 +29,7 @@ import java.text.SimpleDateFormat;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
import javax.servlet.ServletContext;
|
||||||
import javax.servlet.http.HttpServletRequest;
|
import javax.servlet.http.HttpServletRequest;
|
||||||
import javax.servlet.http.HttpServletResponse;
|
import javax.servlet.http.HttpServletResponse;
|
||||||
import javax.servlet.jsp.JspWriter;
|
import javax.servlet.jsp.JspWriter;
|
||||||
|
@ -36,6 +39,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.hdfs.DFSClient;
|
import org.apache.hadoop.hdfs.DFSClient;
|
||||||
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||||
|
import org.apache.hadoop.hdfs.protocol.DatanodeID;
|
||||||
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
|
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
|
||||||
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
|
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
|
||||||
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
|
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
|
||||||
|
@ -43,6 +47,9 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
|
||||||
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
|
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
|
||||||
import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
|
import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
|
||||||
import org.apache.hadoop.hdfs.server.common.JspHelper;
|
import org.apache.hadoop.hdfs.server.common.JspHelper;
|
||||||
|
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
|
||||||
|
import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
||||||
|
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
|
||||||
import org.apache.hadoop.http.HtmlQuoting;
|
import org.apache.hadoop.http.HtmlQuoting;
|
||||||
import org.apache.hadoop.http.HttpConfig;
|
import org.apache.hadoop.http.HttpConfig;
|
||||||
import org.apache.hadoop.net.NetUtils;
|
import org.apache.hadoop.net.NetUtils;
|
||||||
|
@ -50,6 +57,7 @@ import org.apache.hadoop.security.UserGroupInformation;
|
||||||
import org.apache.hadoop.security.token.Token;
|
import org.apache.hadoop.security.token.Token;
|
||||||
import org.apache.hadoop.util.ServletUtil;
|
import org.apache.hadoop.util.ServletUtil;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
|
import org.apache.hadoop.util.VersionInfo;
|
||||||
|
|
||||||
@InterfaceAudience.Private
|
@InterfaceAudience.Private
|
||||||
public class DatanodeJspHelper {
|
public class DatanodeJspHelper {
|
||||||
|
@ -712,4 +720,24 @@ public class DatanodeJspHelper {
|
||||||
final String nnAddr = request.getParameter(JspHelper.NAMENODE_ADDRESS);
|
final String nnAddr = request.getParameter(JspHelper.NAMENODE_ADDRESS);
|
||||||
return getDFSClient(ugi, nnAddr, conf);
|
return getDFSClient(ugi, nnAddr, conf);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Return a table containing version information. */
|
||||||
|
public static String getVersionTable(ServletContext context) {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
final DataNode dataNode = (DataNode) context.getAttribute("datanode");
|
||||||
|
sb.append("<div class='dfstable'><table>");
|
||||||
|
sb.append("<tr><td class='col1'>Version:</td><td>");
|
||||||
|
sb.append(VersionInfo.getVersion() + ", " + VersionInfo.getRevision());
|
||||||
|
sb.append("</td></tr>\n" + "\n <tr><td class='col1'>Compiled:</td><td>"
|
||||||
|
+ VersionInfo.getDate());
|
||||||
|
sb.append(" by " + VersionInfo.getUser() + " from "
|
||||||
|
+ VersionInfo.getBranch());
|
||||||
|
if (dataNode != null) {
|
||||||
|
sb.append("</td></tr>\n <tr><td class='col1'>Cluster ID:</td><td>"
|
||||||
|
+ dataNode.getClusterId());
|
||||||
|
}
|
||||||
|
sb.append("</td></tr>\n</table></div>");
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -410,8 +410,9 @@ public class DatanodeWebHdfsMethods {
|
||||||
throw ioe;
|
throw ioe;
|
||||||
}
|
}
|
||||||
|
|
||||||
final long n = length.getValue() != null? length.getValue()
|
final long n = length.getValue() != null ?
|
||||||
: in.getVisibleLength() - offset.getValue();
|
Math.min(length.getValue(), in.getVisibleLength() - offset.getValue()) :
|
||||||
|
in.getVisibleLength() - offset.getValue();
|
||||||
return Response.ok(new OpenEntity(in, n, dfsclient)).type(
|
return Response.ok(new OpenEntity(in, n, dfsclient)).type(
|
||||||
MediaType.APPLICATION_OCTET_STREAM).build();
|
MediaType.APPLICATION_OCTET_STREAM).build();
|
||||||
}
|
}
|
||||||
|
|
|
@ -2093,6 +2093,10 @@ public class FSDirectory implements Closeable {
|
||||||
/** Verify if the snapshot name is legal. */
|
/** Verify if the snapshot name is legal. */
|
||||||
void verifySnapshotName(String snapshotName, String path)
|
void verifySnapshotName(String snapshotName, String path)
|
||||||
throws PathComponentTooLongException {
|
throws PathComponentTooLongException {
|
||||||
|
if (snapshotName.contains(Path.SEPARATOR)) {
|
||||||
|
throw new HadoopIllegalArgumentException(
|
||||||
|
"Snapshot name cannot contain \"" + Path.SEPARATOR + "\"");
|
||||||
|
}
|
||||||
final byte[] bytes = DFSUtil.string2Bytes(snapshotName);
|
final byte[] bytes = DFSUtil.string2Bytes(snapshotName);
|
||||||
verifyINodeName(bytes);
|
verifyINodeName(bytes);
|
||||||
verifyMaxComponentLength(bytes, path, 0);
|
verifyMaxComponentLength(bytes, path, 0);
|
||||||
|
@ -2726,6 +2730,19 @@ public class FSDirectory implements Closeable {
|
||||||
throw new FileNotFoundException(
|
throw new FileNotFoundException(
|
||||||
"File for given inode path does not exist: " + src);
|
"File for given inode path does not exist: " + src);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handle single ".." for NFS lookup support.
|
||||||
|
if ((pathComponents.length > 4)
|
||||||
|
&& DFSUtil.bytes2String(pathComponents[4]).equals("..")) {
|
||||||
|
INode parent = inode.getParent();
|
||||||
|
if (parent == null || parent.getId() == INodeId.ROOT_INODE_ID) {
|
||||||
|
// inode is root, or its parent is root.
|
||||||
|
return Path.SEPARATOR;
|
||||||
|
} else {
|
||||||
|
return parent.getFullPathName();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
StringBuilder path = id == INodeId.ROOT_INODE_ID ? new StringBuilder()
|
StringBuilder path = id == INodeId.ROOT_INODE_ID ? new StringBuilder()
|
||||||
: new StringBuilder(inode.getFullPathName());
|
: new StringBuilder(inode.getFullPathName());
|
||||||
for (int i = 4; i < pathComponents.length; i++) {
|
for (int i = 4; i < pathComponents.length; i++) {
|
||||||
|
|
|
@ -1274,6 +1274,7 @@ public class FSEditLog implements LogsPurgeable {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
public void selectInputStreams(Collection<EditLogInputStream> streams,
|
public void selectInputStreams(Collection<EditLogInputStream> streams,
|
||||||
long fromTxId, boolean inProgressOk, boolean forReading) {
|
long fromTxId, boolean inProgressOk, boolean forReading) {
|
||||||
journalSet.selectInputStreams(streams, fromTxId, inProgressOk, forReading);
|
journalSet.selectInputStreams(streams, fromTxId, inProgressOk, forReading);
|
||||||
|
@ -1284,18 +1285,27 @@ public class FSEditLog implements LogsPurgeable {
|
||||||
return selectInputStreams(fromTxId, toAtLeastTxId, null, true);
|
return selectInputStreams(fromTxId, toAtLeastTxId, null, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Select a list of input streams to load */
|
||||||
|
public Collection<EditLogInputStream> selectInputStreams(
|
||||||
|
long fromTxId, long toAtLeastTxId, MetaRecoveryContext recovery,
|
||||||
|
boolean inProgressOk) throws IOException {
|
||||||
|
return selectInputStreams(fromTxId, toAtLeastTxId, recovery, inProgressOk,
|
||||||
|
true);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Select a list of input streams to load.
|
* Select a list of input streams.
|
||||||
*
|
*
|
||||||
* @param fromTxId first transaction in the selected streams
|
* @param fromTxId first transaction in the selected streams
|
||||||
* @param toAtLeast the selected streams must contain this transaction
|
* @param toAtLeast the selected streams must contain this transaction
|
||||||
* @param inProgessOk set to true if in-progress streams are OK
|
* @param inProgessOk set to true if in-progress streams are OK
|
||||||
|
* @param forReading whether or not to use the streams to load the edit log
|
||||||
*/
|
*/
|
||||||
public synchronized Collection<EditLogInputStream> selectInputStreams(
|
public synchronized Collection<EditLogInputStream> selectInputStreams(
|
||||||
long fromTxId, long toAtLeastTxId, MetaRecoveryContext recovery,
|
long fromTxId, long toAtLeastTxId, MetaRecoveryContext recovery,
|
||||||
boolean inProgressOk) throws IOException {
|
boolean inProgressOk, boolean forReading) throws IOException {
|
||||||
List<EditLogInputStream> streams = new ArrayList<EditLogInputStream>();
|
List<EditLogInputStream> streams = new ArrayList<EditLogInputStream>();
|
||||||
selectInputStreams(streams, fromTxId, inProgressOk, true);
|
selectInputStreams(streams, fromTxId, inProgressOk, forReading);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
checkForGaps(streams, fromTxId, toAtLeastTxId, inProgressOk);
|
checkForGaps(streams, fromTxId, toAtLeastTxId, inProgressOk);
|
||||||
|
|
|
@ -2153,10 +2153,15 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
|
||||||
throw new FileNotFoundException("failed to append to non-existent file "
|
throw new FileNotFoundException("failed to append to non-existent file "
|
||||||
+ src + " on client " + clientMachine);
|
+ src + " on client " + clientMachine);
|
||||||
}
|
}
|
||||||
final INodeFile myFile = INodeFile.valueOf(inode, src, true);
|
INodeFile myFile = INodeFile.valueOf(inode, src, true);
|
||||||
// Opening an existing file for write - may need to recover lease.
|
// Opening an existing file for write - may need to recover lease.
|
||||||
recoverLeaseInternal(myFile, src, holder, clientMachine, false);
|
recoverLeaseInternal(myFile, src, holder, clientMachine, false);
|
||||||
|
|
||||||
|
// recoverLeaseInternal may create a new InodeFile via
|
||||||
|
// finalizeINodeFileUnderConstruction so we need to refresh
|
||||||
|
// the referenced file.
|
||||||
|
myFile = INodeFile.valueOf(dir.getINode(src), src, true);
|
||||||
|
|
||||||
final DatanodeDescriptor clientNode =
|
final DatanodeDescriptor clientNode =
|
||||||
blockManager.getDatanodeManager().getDatanodeByHost(clientMachine);
|
blockManager.getDatanodeManager().getDatanodeByHost(clientMachine);
|
||||||
return prepareFileForWrite(src, myFile, holder, clientMachine, clientNode,
|
return prepareFileForWrite(src, myFile, holder, clientMachine, clientNode,
|
||||||
|
@ -6364,6 +6369,16 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
|
||||||
return JSON.toString(jasList);
|
return JSON.toString(jasList);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override // NameNodeMxBean
|
||||||
|
public String getJournalTransactionInfo() {
|
||||||
|
Map<String, String> txnIdMap = new HashMap<String, String>();
|
||||||
|
txnIdMap.put("LastAppliedOrWrittenTxId",
|
||||||
|
Long.toString(this.getFSImage().getLastAppliedOrWrittenTxId()));
|
||||||
|
txnIdMap.put("MostRecentCheckpointTxId",
|
||||||
|
Long.toString(this.getFSImage().getMostRecentCheckpointTxId()));
|
||||||
|
return JSON.toString(txnIdMap);
|
||||||
|
}
|
||||||
|
|
||||||
@Override // NameNodeMXBean
|
@Override // NameNodeMXBean
|
||||||
public String getNNStarted() {
|
public String getNNStarted() {
|
||||||
return getStartTime().toString();
|
return getStartTime().toString();
|
||||||
|
|
|
@ -169,18 +169,26 @@ public class FileJournalManager implements JournalManager {
|
||||||
* @param fromTxId the txnid which to start looking
|
* @param fromTxId the txnid which to start looking
|
||||||
* @param forReading whether or not the caller intends to read from the edit
|
* @param forReading whether or not the caller intends to read from the edit
|
||||||
* logs
|
* logs
|
||||||
|
* @param inProgressOk whether or not to include the in-progress edit log
|
||||||
|
* segment
|
||||||
* @return a list of remote edit logs
|
* @return a list of remote edit logs
|
||||||
* @throws IOException if edit logs cannot be listed.
|
* @throws IOException if edit logs cannot be listed.
|
||||||
*/
|
*/
|
||||||
public List<RemoteEditLog> getRemoteEditLogs(long firstTxId,
|
public List<RemoteEditLog> getRemoteEditLogs(long firstTxId,
|
||||||
boolean forReading) throws IOException {
|
boolean forReading, boolean inProgressOk) throws IOException {
|
||||||
|
// make sure not reading in-progress edit log, i.e., if forReading is true,
|
||||||
|
// we should ignore the in-progress edit log.
|
||||||
|
Preconditions.checkArgument(!(forReading && inProgressOk));
|
||||||
|
|
||||||
File currentDir = sd.getCurrentDir();
|
File currentDir = sd.getCurrentDir();
|
||||||
List<EditLogFile> allLogFiles = matchEditLogs(currentDir);
|
List<EditLogFile> allLogFiles = matchEditLogs(currentDir);
|
||||||
List<RemoteEditLog> ret = Lists.newArrayListWithCapacity(
|
List<RemoteEditLog> ret = Lists.newArrayListWithCapacity(
|
||||||
allLogFiles.size());
|
allLogFiles.size());
|
||||||
|
|
||||||
for (EditLogFile elf : allLogFiles) {
|
for (EditLogFile elf : allLogFiles) {
|
||||||
if (elf.hasCorruptHeader() || elf.isInProgress()) continue;
|
if (elf.hasCorruptHeader() || (!inProgressOk && elf.isInProgress())) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
if (elf.getFirstTxId() >= firstTxId) {
|
if (elf.getFirstTxId() >= firstTxId) {
|
||||||
ret.add(new RemoteEditLog(elf.firstTxId, elf.lastTxId));
|
ret.add(new RemoteEditLog(elf.firstTxId, elf.lastTxId));
|
||||||
} else if (elf.getFirstTxId() < firstTxId && firstTxId <= elf.getLastTxId()) {
|
} else if (elf.getFirstTxId() < firstTxId && firstTxId <= elf.getLastTxId()) {
|
||||||
|
|
|
@ -310,11 +310,14 @@ public class GetImageServlet extends HttpServlet {
|
||||||
|
|
||||||
static String getParamStringToPutImage(long txid,
|
static String getParamStringToPutImage(long txid,
|
||||||
InetSocketAddress imageListenAddress, Storage storage) {
|
InetSocketAddress imageListenAddress, Storage storage) {
|
||||||
|
String machine = !imageListenAddress.isUnresolved()
|
||||||
|
&& imageListenAddress.getAddress().isAnyLocalAddress() ? null
|
||||||
|
: imageListenAddress.getHostName();
|
||||||
return "putimage=1" +
|
return "putimage=1" +
|
||||||
"&" + TXID_PARAM + "=" + txid +
|
"&" + TXID_PARAM + "=" + txid +
|
||||||
"&port=" + imageListenAddress.getPort() +
|
"&port=" + imageListenAddress.getPort() +
|
||||||
"&" + STORAGEINFO_PARAM + "=" +
|
(machine != null ? "&machine=" + machine : "")
|
||||||
|
+ "&" + STORAGEINFO_PARAM + "=" +
|
||||||
storage.toColonSeparatedString();
|
storage.toColonSeparatedString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -341,10 +344,6 @@ public class GetImageServlet extends HttpServlet {
|
||||||
Map<String, String[]> pmap = request.getParameterMap();
|
Map<String, String[]> pmap = request.getParameterMap();
|
||||||
isGetImage = isGetEdit = isPutImage = fetchLatest = false;
|
isGetImage = isGetEdit = isPutImage = fetchLatest = false;
|
||||||
remoteport = 0;
|
remoteport = 0;
|
||||||
machineName = request.getRemoteHost();
|
|
||||||
if (InetAddresses.isInetAddress(machineName)) {
|
|
||||||
machineName = NetUtils.getHostNameOfIP(machineName);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (Map.Entry<String, String[]> entry : pmap.entrySet()) {
|
for (Map.Entry<String, String[]> entry : pmap.entrySet()) {
|
||||||
String key = entry.getKey();
|
String key = entry.getKey();
|
||||||
|
@ -369,11 +368,20 @@ public class GetImageServlet extends HttpServlet {
|
||||||
txId = ServletUtil.parseLongParam(request, TXID_PARAM);
|
txId = ServletUtil.parseLongParam(request, TXID_PARAM);
|
||||||
} else if (key.equals("port")) {
|
} else if (key.equals("port")) {
|
||||||
remoteport = new Integer(val[0]).intValue();
|
remoteport = new Integer(val[0]).intValue();
|
||||||
|
} else if (key.equals("machine")) {
|
||||||
|
machineName = val[0];
|
||||||
} else if (key.equals(STORAGEINFO_PARAM)) {
|
} else if (key.equals(STORAGEINFO_PARAM)) {
|
||||||
storageInfoString = val[0];
|
storageInfoString = val[0];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (machineName == null) {
|
||||||
|
machineName = request.getRemoteHost();
|
||||||
|
if (InetAddresses.isInetAddress(machineName)) {
|
||||||
|
machineName = NetUtils.getHostNameOfIP(machineName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
int numGets = (isGetImage?1:0) + (isGetEdit?1:0);
|
int numGets = (isGetImage?1:0) + (isGetEdit?1:0);
|
||||||
if ((numGets > 1) || (numGets == 0) && !isPutImage) {
|
if ((numGets > 1) || (numGets == 0) && !isPutImage) {
|
||||||
throw new IOException("Illegal parameters to TransferFsImage");
|
throw new IOException("Illegal parameters to TransferFsImage");
|
||||||
|
|
|
@ -17,6 +17,8 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hdfs.server.namenode;
|
package org.apache.hadoop.hdfs.server.namenode;
|
||||||
|
|
||||||
|
import static org.apache.hadoop.util.ExitUtil.terminate;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
|
@ -31,14 +33,10 @@ import java.util.concurrent.CopyOnWriteArrayList;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.classification.InterfaceAudience;
|
import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
|
|
||||||
import static org.apache.hadoop.util.ExitUtil.terminate;
|
|
||||||
|
|
||||||
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
|
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
|
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
|
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
|
||||||
|
|
||||||
import com.google.common.annotations.VisibleForTesting;
|
|
||||||
import com.google.common.base.Preconditions;
|
import com.google.common.base.Preconditions;
|
||||||
import com.google.common.collect.ComparisonChain;
|
import com.google.common.collect.ComparisonChain;
|
||||||
import com.google.common.collect.ImmutableList;
|
import com.google.common.collect.ImmutableList;
|
||||||
|
@ -257,13 +255,12 @@ public class JournalSet implements JournalManager {
|
||||||
". Skipping.", ioe);
|
". Skipping.", ioe);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
chainAndMakeRedundantStreams(streams, allStreams, fromTxId, inProgressOk);
|
chainAndMakeRedundantStreams(streams, allStreams, fromTxId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void chainAndMakeRedundantStreams(
|
public static void chainAndMakeRedundantStreams(
|
||||||
Collection<EditLogInputStream> outStreams,
|
Collection<EditLogInputStream> outStreams,
|
||||||
PriorityQueue<EditLogInputStream> allStreams,
|
PriorityQueue<EditLogInputStream> allStreams, long fromTxId) {
|
||||||
long fromTxId, boolean inProgressOk) {
|
|
||||||
// We want to group together all the streams that start on the same start
|
// We want to group together all the streams that start on the same start
|
||||||
// transaction ID. To do this, we maintain an accumulator (acc) of all
|
// transaction ID. To do this, we maintain an accumulator (acc) of all
|
||||||
// the streams we've seen at a given start transaction ID. When we see a
|
// the streams we've seen at a given start transaction ID. When we see a
|
||||||
|
@ -598,7 +595,7 @@ public class JournalSet implements JournalManager {
|
||||||
if (j.getManager() instanceof FileJournalManager) {
|
if (j.getManager() instanceof FileJournalManager) {
|
||||||
FileJournalManager fjm = (FileJournalManager)j.getManager();
|
FileJournalManager fjm = (FileJournalManager)j.getManager();
|
||||||
try {
|
try {
|
||||||
allLogs.addAll(fjm.getRemoteEditLogs(fromTxId, forReading));
|
allLogs.addAll(fjm.getRemoteEditLogs(fromTxId, forReading, false));
|
||||||
} catch (Throwable t) {
|
} catch (Throwable t) {
|
||||||
LOG.warn("Cannot list edit logs in " + fjm, t);
|
LOG.warn("Cannot list edit logs in " + fjm, t);
|
||||||
}
|
}
|
||||||
|
|
|
@ -956,41 +956,49 @@ public class NameNode implements NameNodeStatusMXBean {
|
||||||
FSEditLog sourceEditLog = fsns.getFSImage().editLog;
|
FSEditLog sourceEditLog = fsns.getFSImage().editLog;
|
||||||
|
|
||||||
long fromTxId = fsns.getFSImage().getMostRecentCheckpointTxId();
|
long fromTxId = fsns.getFSImage().getMostRecentCheckpointTxId();
|
||||||
Collection<EditLogInputStream> streams = sourceEditLog.selectInputStreams(
|
|
||||||
fromTxId+1, 0);
|
|
||||||
|
|
||||||
// Set the nextTxid to the CheckpointTxId+1
|
|
||||||
newSharedEditLog.setNextTxId(fromTxId + 1);
|
|
||||||
|
|
||||||
// Copy all edits after last CheckpointTxId to shared edits dir
|
Collection<EditLogInputStream> streams = null;
|
||||||
for (EditLogInputStream stream : streams) {
|
try {
|
||||||
LOG.debug("Beginning to copy stream " + stream + " to shared edits");
|
streams = sourceEditLog.selectInputStreams(fromTxId + 1, 0);
|
||||||
FSEditLogOp op;
|
|
||||||
boolean segmentOpen = false;
|
|
||||||
while ((op = stream.readOp()) != null) {
|
|
||||||
if (LOG.isTraceEnabled()) {
|
|
||||||
LOG.trace("copying op: " + op);
|
|
||||||
}
|
|
||||||
if (!segmentOpen) {
|
|
||||||
newSharedEditLog.startLogSegment(op.txid, false);
|
|
||||||
segmentOpen = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
newSharedEditLog.logEdit(op);
|
|
||||||
|
|
||||||
if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) {
|
// Set the nextTxid to the CheckpointTxId+1
|
||||||
|
newSharedEditLog.setNextTxId(fromTxId + 1);
|
||||||
|
|
||||||
|
// Copy all edits after last CheckpointTxId to shared edits dir
|
||||||
|
for (EditLogInputStream stream : streams) {
|
||||||
|
LOG.debug("Beginning to copy stream " + stream + " to shared edits");
|
||||||
|
FSEditLogOp op;
|
||||||
|
boolean segmentOpen = false;
|
||||||
|
while ((op = stream.readOp()) != null) {
|
||||||
|
if (LOG.isTraceEnabled()) {
|
||||||
|
LOG.trace("copying op: " + op);
|
||||||
|
}
|
||||||
|
if (!segmentOpen) {
|
||||||
|
newSharedEditLog.startLogSegment(op.txid, false);
|
||||||
|
segmentOpen = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
newSharedEditLog.logEdit(op);
|
||||||
|
|
||||||
|
if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) {
|
||||||
|
newSharedEditLog.logSync();
|
||||||
|
newSharedEditLog.endCurrentLogSegment(false);
|
||||||
|
LOG.debug("ending log segment because of END_LOG_SEGMENT op in "
|
||||||
|
+ stream);
|
||||||
|
segmentOpen = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (segmentOpen) {
|
||||||
|
LOG.debug("ending log segment because of end of stream in " + stream);
|
||||||
newSharedEditLog.logSync();
|
newSharedEditLog.logSync();
|
||||||
newSharedEditLog.endCurrentLogSegment(false);
|
newSharedEditLog.endCurrentLogSegment(false);
|
||||||
LOG.debug("ending log segment because of END_LOG_SEGMENT op in " + stream);
|
|
||||||
segmentOpen = false;
|
segmentOpen = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} finally {
|
||||||
if (segmentOpen) {
|
if (streams != null) {
|
||||||
LOG.debug("ending log segment because of end of stream in " + stream);
|
FSEditLog.closeAllStreams(streams);
|
||||||
newSharedEditLog.logSync();
|
|
||||||
newSharedEditLog.endCurrentLogSegment(false);
|
|
||||||
segmentOpen = false;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,7 @@
|
||||||
package org.apache.hadoop.hdfs.server.namenode;
|
package org.apache.hadoop.hdfs.server.namenode;
|
||||||
|
|
||||||
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
|
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.net.InetSocketAddress;
|
import java.net.InetSocketAddress;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
|
@ -70,66 +71,31 @@ public class NameNodeHttpServer {
|
||||||
public void start() throws IOException {
|
public void start() throws IOException {
|
||||||
final String infoHost = bindAddress.getHostName();
|
final String infoHost = bindAddress.getHostName();
|
||||||
int infoPort = bindAddress.getPort();
|
int infoPort = bindAddress.getPort();
|
||||||
|
httpServer = new HttpServer.Builder().setName("hdfs")
|
||||||
|
.setBindAddress(infoHost).setPort(infoPort)
|
||||||
|
.setFindPort(infoPort == 0).setConf(conf).setACL(
|
||||||
|
new AccessControlList(conf.get(DFS_ADMIN, " ")))
|
||||||
|
.setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
|
||||||
|
.setUsernameConfKey(
|
||||||
|
DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY)
|
||||||
|
.setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
|
||||||
|
DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)).build();
|
||||||
|
if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
|
||||||
|
//add SPNEGO authentication filter for webhdfs
|
||||||
|
final String name = "SPNEGO";
|
||||||
|
final String classname = AuthFilter.class.getName();
|
||||||
|
final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
|
||||||
|
Map<String, String> params = getAuthFilterParams(conf);
|
||||||
|
httpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
|
||||||
|
new String[]{pathSpec});
|
||||||
|
HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + ")");
|
||||||
|
|
||||||
httpServer = new HttpServer("hdfs", infoHost, infoPort,
|
// add webhdfs packages
|
||||||
infoPort == 0, conf,
|
httpServer.addJerseyResourcePackage(
|
||||||
new AccessControlList(conf.get(DFS_ADMIN, " "))) {
|
NamenodeWebHdfsMethods.class.getPackage().getName()
|
||||||
{
|
+ ";" + Param.class.getPackage().getName(), pathSpec);
|
||||||
// Add SPNEGO support to NameNode
|
|
||||||
if (UserGroupInformation.isSecurityEnabled()) {
|
|
||||||
initSpnego(conf,
|
|
||||||
DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
|
|
||||||
DFSUtil.getSpnegoKeytabKey(conf,
|
|
||||||
DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
|
|
||||||
}
|
|
||||||
if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
|
|
||||||
//add SPNEGO authentication filter for webhdfs
|
|
||||||
final String name = "SPNEGO";
|
|
||||||
final String classname = AuthFilter.class.getName();
|
|
||||||
final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
|
|
||||||
Map<String, String> params = getAuthFilterParams(conf);
|
|
||||||
defineFilter(webAppContext, name, classname, params,
|
|
||||||
new String[]{pathSpec});
|
|
||||||
LOG.info("Added filter '" + name + "' (class=" + classname + ")");
|
|
||||||
|
|
||||||
// add webhdfs packages
|
|
||||||
addJerseyResourcePackage(
|
|
||||||
NamenodeWebHdfsMethods.class.getPackage().getName()
|
|
||||||
+ ";" + Param.class.getPackage().getName(), pathSpec);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private Map<String, String> getAuthFilterParams(Configuration conf)
|
|
||||||
throws IOException {
|
|
||||||
Map<String, String> params = new HashMap<String, String>();
|
|
||||||
String principalInConf = conf
|
|
||||||
.get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
|
|
||||||
if (principalInConf != null && !principalInConf.isEmpty()) {
|
|
||||||
params
|
|
||||||
.put(
|
|
||||||
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
|
|
||||||
SecurityUtil.getServerPrincipal(principalInConf,
|
|
||||||
bindAddress.getHostName()));
|
|
||||||
} else if (UserGroupInformation.isSecurityEnabled()) {
|
|
||||||
LOG.error("WebHDFS and security are enabled, but configuration property '" +
|
|
||||||
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY +
|
|
||||||
"' is not set.");
|
|
||||||
}
|
|
||||||
String httpKeytab = conf.get(DFSUtil.getSpnegoKeytabKey(conf,
|
|
||||||
DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
|
|
||||||
if (httpKeytab != null && !httpKeytab.isEmpty()) {
|
|
||||||
params.put(
|
|
||||||
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
|
|
||||||
httpKeytab);
|
|
||||||
} else if (UserGroupInformation.isSecurityEnabled()) {
|
|
||||||
LOG.error("WebHDFS and security are enabled, but configuration property '" +
|
|
||||||
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY +
|
|
||||||
"' is not set.");
|
|
||||||
}
|
|
||||||
return params;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
|
boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
|
||||||
if (certSSL) {
|
if (certSSL) {
|
||||||
boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
|
boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
|
||||||
|
@ -153,6 +119,38 @@ public class NameNodeHttpServer {
|
||||||
httpServer.start();
|
httpServer.start();
|
||||||
httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
|
httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private Map<String, String> getAuthFilterParams(Configuration conf)
|
||||||
|
throws IOException {
|
||||||
|
Map<String, String> params = new HashMap<String, String>();
|
||||||
|
String principalInConf = conf
|
||||||
|
.get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
|
||||||
|
if (principalInConf != null && !principalInConf.isEmpty()) {
|
||||||
|
params
|
||||||
|
.put(
|
||||||
|
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
|
||||||
|
SecurityUtil.getServerPrincipal(principalInConf,
|
||||||
|
bindAddress.getHostName()));
|
||||||
|
} else if (UserGroupInformation.isSecurityEnabled()) {
|
||||||
|
HttpServer.LOG.error(
|
||||||
|
"WebHDFS and security are enabled, but configuration property '" +
|
||||||
|
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY +
|
||||||
|
"' is not set.");
|
||||||
|
}
|
||||||
|
String httpKeytab = conf.get(DFSUtil.getSpnegoKeytabKey(conf,
|
||||||
|
DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
|
||||||
|
if (httpKeytab != null && !httpKeytab.isEmpty()) {
|
||||||
|
params.put(
|
||||||
|
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
|
||||||
|
httpKeytab);
|
||||||
|
} else if (UserGroupInformation.isSecurityEnabled()) {
|
||||||
|
HttpServer.LOG.error(
|
||||||
|
"WebHDFS and security are enabled, but configuration property '" +
|
||||||
|
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY +
|
||||||
|
"' is not set.");
|
||||||
|
}
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
public void stop() throws Exception {
|
public void stop() throws Exception {
|
||||||
|
|
|
@ -188,6 +188,12 @@ public interface NameNodeMXBean {
|
||||||
* @return the name journal status information, as a JSON string.
|
* @return the name journal status information, as a JSON string.
|
||||||
*/
|
*/
|
||||||
public String getNameJournalStatus();
|
public String getNameJournalStatus();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get information about the transaction ID, including the last applied
|
||||||
|
* transaction ID and the most recent checkpoint's transaction ID
|
||||||
|
*/
|
||||||
|
public String getJournalTransactionInfo();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the NN start time
|
* Gets the NN start time
|
||||||
|
|
|
@@ -256,19 +256,15 @@ public class SecondaryNameNode implements Runnable {

     // initialize the webserver for uploading files.
     int tmpInfoPort = infoSocAddr.getPort();
-    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
-                                tmpInfoPort == 0, conf,
-                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
-      {
-        if (UserGroupInformation.isSecurityEnabled()) {
-          initSpnego(
-              conf,
-              DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
-              DFSUtil.getSpnegoKeytabKey(conf,
-                  DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
-        }
-      }
-    };
+    infoServer = new HttpServer.Builder().setName("secondary")
+        .setBindAddress(infoBindAddress).setPort(tmpInfoPort)
+        .setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
+            new AccessControlList(conf.get(DFS_ADMIN, " ")))
+        .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
+        .setUsernameConfKey(
+            DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY)
+        .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
+            DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY)).build();
     infoServer.setAttribute("secondary.name.node", this);
     infoServer.setAttribute("name.system.image", checkpointImage);
     infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
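The hunk above replaces a many-argument HttpServer constructor plus an anonymous subclass with a fluent HttpServer.Builder, so each setting reads as a named parameter and SPNEGO setup is no longer hidden in an instance initializer. A minimal sketch of that construction style, using a hypothetical WebServer class rather than Hadoop's actual builder:

    // Hypothetical builder sketched to mirror the fluent style used above.
    // WebServer, setName, setPort and build are illustrative names, not Hadoop APIs.
    public class WebServer {
      private final String name;
      private final int port;

      private WebServer(Builder b) {
        this.name = b.name;
        this.port = b.port;
      }

      @Override
      public String toString() {
        return name + ":" + port;
      }

      public static class Builder {
        private String name = "web";
        private int port = 0; // 0 means "pick any free port"

        public Builder setName(String name) { this.name = name; return this; }
        public Builder setPort(int port) { this.port = port; return this; }
        public WebServer build() { return new WebServer(this); }
      }

      public static void main(String[] args) {
        // Reads like the new SecondaryNameNode code: each option is named.
        System.out.println(new Builder().setName("secondary").setPort(50090).build());
      }
    }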
@@ -226,7 +226,7 @@ public class BootstrapStandby implements Tool, Configurable {
     try {
       Collection<EditLogInputStream> streams =
         image.getEditLog().selectInputStreams(
-          firstTxIdInLogs, curTxIdOnOtherNode, null, true);
+          firstTxIdInLogs, curTxIdOnOtherNode, null, true, false);
       for (EditLogInputStream stream : streams) {
         IOUtils.closeStream(stream);
       }
@@ -17,9 +17,17 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.ha;

+import static org.apache.hadoop.util.Time.now;
+
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedAction;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -38,10 +46,10 @@ import org.apache.hadoop.hdfs.util.Canceler;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import static org.apache.hadoop.util.Time.now;

 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;

 /**
  * Thread which runs inside the NN when it's in Standby state,
@@ -57,6 +65,7 @@ public class StandbyCheckpointer {
   private final FSNamesystem namesystem;
   private long lastCheckpointTime;
   private final CheckpointerThread thread;
+  private final ThreadFactory uploadThreadFactory;
   private String activeNNAddress;
   private InetSocketAddress myNNAddress;

@@ -72,6 +81,8 @@ public class StandbyCheckpointer {
     this.namesystem = ns;
     this.checkpointConf = new CheckpointConf(conf);
     this.thread = new CheckpointerThread();
+    this.uploadThreadFactory = new ThreadFactoryBuilder().setDaemon(true)
+        .setNameFormat("TransferFsImageUpload-%d").build();

     setNameNodeAddresses(conf);
   }
@@ -142,7 +153,7 @@ public class StandbyCheckpointer {

   private void doCheckpoint() throws InterruptedException, IOException {
     assert canceler != null;
-    long txid;
+    final long txid;

     namesystem.writeLockInterruptibly();
     try {
@@ -171,9 +182,26 @@ public class StandbyCheckpointer {
     }

     // Upload the saved checkpoint back to the active
-    TransferFsImage.uploadImageFromStorage(
-        activeNNAddress, myNNAddress,
-        namesystem.getFSImage().getStorage(), txid);
+    // Do this in a separate thread to avoid blocking transition to active
+    // See HDFS-4816
+    ExecutorService executor =
+        Executors.newSingleThreadExecutor(uploadThreadFactory);
+    Future<Void> upload = executor.submit(new Callable<Void>() {
+      @Override
+      public Void call() throws IOException {
+        TransferFsImage.uploadImageFromStorage(
+            activeNNAddress, myNNAddress,
+            namesystem.getFSImage().getStorage(), txid);
+        return null;
+      }
+    });
+    executor.shutdown();
+    try {
+      upload.get();
+    } catch (ExecutionException e) {
+      throw new IOException("Exception during image upload: " + e.getMessage(),
+          e.getCause());
+    }
   }

   /**
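The doCheckpoint() hunk above moves the image upload onto a dedicated single-threaded executor so that a standby-to-active transition is not blocked behind the transfer (per the HDFS-4816 comment), while upload.get() still surfaces any failure to the caller. A minimal, self-contained sketch of that submit/get/unwrap pattern, with a Guava ThreadFactoryBuilder-named daemon thread; the task body below is a stand-in, not TransferFsImage:

    import java.io.IOException;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    import com.google.common.util.concurrent.ThreadFactoryBuilder;

    // Sketch of "run the upload on its own daemon thread, then wait and
    // rethrow the real cause" as used in the StandbyCheckpointer change.
    public class AsyncUploadSketch {
      public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor(
            new ThreadFactoryBuilder().setDaemon(true)
                .setNameFormat("TransferFsImageUpload-%d").build());
        Future<Void> upload = executor.submit(new Callable<Void>() {
          @Override
          public Void call() throws IOException {
            // Stand-in for TransferFsImage.uploadImageFromStorage(...)
            System.out.println("uploading checkpoint from "
                + Thread.currentThread().getName());
            return null;
          }
        });
        executor.shutdown();
        try {
          upload.get(); // block until done; failures arrive wrapped
        } catch (ExecutionException e) {
          // Unwrap so callers see the original exception, not the wrapper.
          throw new IOException("Exception during image upload: "
              + e.getMessage(), e.getCause());
        }
      }
    }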
@@ -301,6 +329,7 @@ public class StandbyCheckpointer {
           LOG.info("Checkpoint was cancelled: " + ce.getMessage());
           canceledCount++;
         } catch (InterruptedException ie) {
+          LOG.info("Interrupted during checkpointing", ie);
           // Probably requested shutdown.
           continue;
         } catch (Throwable t) {
@@ -31,6 +31,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 package hadoop.hdfs;

+import "HAServiceProtocol.proto";
 import "hdfs.proto";

 /**
@@ -185,11 +186,7 @@ message StorageReportProto {
  * txid - Highest transaction ID this NN has seen
  */
 message NNHAStatusHeartbeatProto {
-  enum State {
-    ACTIVE = 0;
-    STANDBY = 1;
-  }
-  required State state = 1;
+  required hadoop.common.HAServiceStateProto state = 1;
   required uint64 txid = 2;
 }

@@ -177,6 +177,7 @@ message GetEditLogManifestRequestProto {
   required uint64 sinceTxId = 2;  // Transaction ID
   // Whether or not the client will be reading from the returned streams.
   optional bool forReading = 3 [default = true];
+  optional bool inProgressOk = 4 [default = false];
 }

 message GetEditLogManifestResponseProto {
@@ -0,0 +1,58 @@
+<%
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+%>
+<%@page import="org.apache.hadoop.hdfs.tools.GetConf"%>
+<%@page import="org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper"%>
+<%@page import="org.apache.hadoop.hdfs.server.datanode.DataNode"%>
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="org.apache.hadoop.util.ServletUtil"
+%>
+<%!
+  //for java.io.Serializable
+  private static final long serialVersionUID = 1L;
+%>
+<%
+  DataNode dataNode = (DataNode)getServletContext().getAttribute("datanode");
+  String state = dataNode.isDatanodeUp()?"active":"inactive";
+  String dataNodeLabel = dataNode.getDisplayName();
+%>
+
+<!DOCTYPE html>
+<html>
+<head>
+  <link rel="stylesheet" type="text/css" href="/static/hadoop.css">
+  <title>Hadoop DataNode <%=dataNodeLabel%></title>
+</head>
+<body>
+<h1>DataNode '<%=dataNodeLabel%>' (<%=state%>)</h1>
+<%= DatanodeJspHelper.getVersionTable(getServletContext()) %>
+<br />
+<b><a href="/logs/">DataNode Logs</a></b>
+<br />
+<b><a href="/logLevel">View/Set Log Level</a></b>
+<br />
+<b><a href="/metrics">Metrics</a></b>
+<br />
+<b><a href="/conf">Configuration</a></b>
+<br />
+<b><a href="/blockScannerReport">Block Scanner Report</a></b>
+<%
+out.println(ServletUtil.htmlFooter());
+%>
@@ -0,0 +1,35 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<meta HTTP-EQUIV="REFRESH" content="0;url=dataNodeHome.jsp"/>
+<html>
+<head>
+<title>Hadoop Administration</title>
+</head>
+
+<body>
+
+<h1>Hadoop Administration</h1>
+
+<ul>
+
+<li><a href="dataNodeHome.jsp">DataNode Home</a></li>
+
+</ul>
+
+</body>
+
+</html>
@@ -1,5 +1,3 @@
-<meta HTTP-EQUIV="REFRESH" content="0;url=dfshealth.jsp"/>
-<html>
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with

@@ -16,6 +14,8 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
+<meta HTTP-EQUIV="REFRESH" content="0;url=dfshealth.jsp"/>
+<html>
 <head>
 <title>Hadoop Administration</title>
 </head>
@@ -20,7 +20,6 @@ package org.apache.hadoop.fs;
 import static org.junit.Assert.*;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.regex.Pattern;

 import org.apache.commons.lang.StringUtils;
@@ -30,8 +29,6 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.*;

-import com.google.common.base.Joiner;
-
 public class TestGlobPaths {

   static class RegexPathFilter implements PathFilter {
@@ -50,6 +47,7 @@ public class TestGlobPaths {

   static private MiniDFSCluster dfsCluster;
   static private FileSystem fs;
+  static private FileContext fc;
   static final private int NUM_OF_PATHS = 4;
   static private String USER_DIR;
   private Path[] path = new Path[NUM_OF_PATHS];
@@ -59,6 +57,7 @@ public class TestGlobPaths {
     Configuration conf = new HdfsConfiguration();
     dfsCluster = new MiniDFSCluster.Builder(conf).build();
     fs = FileSystem.get(conf);
+    fc = FileContext.getFileContext(conf);
     USER_DIR = fs.getHomeDirectory().toUri().getPath().toString();
   }

@@ -466,6 +465,9 @@ public class TestGlobPaths {

   @Test
   public void pTestEscape() throws IOException {
+    // Skip the test case on Windows because backslash will be treated as a
+    // path separator instead of an escaping character on Windows.
+    org.junit.Assume.assumeTrue(!Path.WINDOWS);
     try {
       String [] files = new String[] {USER_DIR+"/ab\\[c.d"};
       Path[] matchedPath = prepareTesting(USER_DIR+"/ab\\[c.d", files);
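The pTestEscape change above skips the test on Windows through org.junit.Assume instead of letting it fail. A small sketch of how an assumption short-circuits a JUnit 4 test; the OS check and class name here are illustrative:

    import static org.junit.Assume.assumeTrue;

    import org.junit.Test;

    // Sketch: a violated assumption marks the test as skipped, not failed.
    public class AssumeSketchTest {
      private static final boolean WINDOWS =
          System.getProperty("os.name").startsWith("Windows");

      @Test
      public void skippedOnWindows() {
        assumeTrue(!WINDOWS); // on Windows this aborts the test as "assumption violated"
        // ... assertions that rely on '\' being an escape character, not a separator
      }
    }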
@@ -620,21 +622,7 @@ public class TestGlobPaths {
       cleanupDFS();
     }
   }
-
-  @Test
-  public void pTestRelativePath() throws IOException {
-    try {
-      String [] files = new String[] {"a", "abc", "abc.p", "bacd"};
-      Path[] matchedPath = prepareTesting("a*", files);
-      assertEquals(matchedPath.length, 3);
-      assertEquals(matchedPath[0], new Path(USER_DIR, path[0]));
-      assertEquals(matchedPath[1], new Path(USER_DIR, path[1]));
-      assertEquals(matchedPath[2], new Path(USER_DIR, path[2]));
-    } finally {
-      cleanupDFS();
-    }
-  }

   /* Test {xx,yy} */
   @Test
   public void pTestCurlyBracket() throws IOException {
@@ -800,28 +788,24 @@ public class TestGlobPaths {
   /**
    * Run a glob test on FileSystem.
    */
-  private static void testOnFileSystem(FSTestWrapperGlobTest test) throws Exception {
-    Configuration conf = new HdfsConfiguration();
-    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
+  private void testOnFileSystem(FSTestWrapperGlobTest test) throws Exception {
     try {
-      FileSystem fs = FileSystem.get(conf);
+      fc.mkdir(new Path(USER_DIR), FsPermission.getDefault(), true);
       test.run(new FileSystemTestWrapper(fs), fs, null);
     } finally {
-      cluster.shutdown();
+      fc.delete(new Path(USER_DIR), true);
     }
   }

   /**
    * Run a glob test on FileContext.
    */
-  private static void testOnFileContext(FSTestWrapperGlobTest test) throws Exception {
-    Configuration conf = new HdfsConfiguration();
-    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
+  private void testOnFileContext(FSTestWrapperGlobTest test) throws Exception {
     try {
-      FileContext fc = FileContext.getFileContext(conf);
+      fs.mkdirs(new Path(USER_DIR));
       test.run(new FileContextTestWrapper(fc), null, fc);
     } finally {
-      cluster.shutdown();
+      cleanupDFS();
     }
   }

@@ -854,32 +838,33 @@ public class TestGlobPaths {
         throws Exception {
       // Test that globbing through a symlink to a directory yields a path
       // containing that symlink.
-      wrap.mkdir(new Path("/alpha"),
-          FsPermission.getDirDefault(), false);
-      wrap.createSymlink(new Path("/alpha"), new Path("/alphaLink"), false);
-      wrap.mkdir(new Path("/alphaLink/beta"),
+      wrap.mkdir(new Path(USER_DIR + "/alpha"), FsPermission.getDirDefault(),
+          false);
+      wrap.createSymlink(new Path(USER_DIR + "/alpha"), new Path(USER_DIR
+          + "/alphaLink"), false);
+      wrap.mkdir(new Path(USER_DIR + "/alphaLink/beta"),
           FsPermission.getDirDefault(), false);
       // Test simple glob
-      FileStatus[] statuses =
-          wrap.globStatus(new Path("/alpha/*"), new AcceptAllPathFilter());
-      Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alpha/beta",
-          statuses[0].getPath().toUri().getPath());
-      // Test glob through symlink
-      statuses =
-          wrap.globStatus(new Path("/alphaLink/*"), new AcceptAllPathFilter());
-      Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alphaLink/beta",
-          statuses[0].getPath().toUri().getPath());
-      // If the terminal path component in a globbed path is a symlink,
-      // we don't dereference that link.
-      wrap.createSymlink(new Path("beta"), new Path("/alphaLink/betaLink"),
-          false);
-      statuses = wrap.globStatus(new Path("/alpha/betaLi*"),
+      FileStatus[] statuses = wrap.globStatus(new Path(USER_DIR + "/alpha/*"),
           new AcceptAllPathFilter());
       Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alpha/betaLink",
-          statuses[0].getPath().toUri().getPath());
+      Assert.assertEquals(USER_DIR + "/alpha/beta", statuses[0].getPath()
+          .toUri().getPath());
+      // Test glob through symlink
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alphaLink/*"),
+          new AcceptAllPathFilter());
+      Assert.assertEquals(1, statuses.length);
+      Assert.assertEquals(USER_DIR + "/alphaLink/beta", statuses[0].getPath()
+          .toUri().getPath());
+      // If the terminal path component in a globbed path is a symlink,
+      // we don't dereference that link.
+      wrap.createSymlink(new Path("beta"), new Path(USER_DIR
+          + "/alphaLink/betaLink"), false);
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alpha/betaLi*"),
+          new AcceptAllPathFilter());
+      Assert.assertEquals(1, statuses.length);
+      Assert.assertEquals(USER_DIR + "/alpha/betaLink", statuses[0].getPath()
+          .toUri().getPath());
       // todo: test symlink-to-symlink-to-dir, etc.
     }
   }
@@ -899,58 +884,64 @@ public class TestGlobPaths {
    *
    * Also test globbing dangling symlinks. It should NOT throw any exceptions!
    */
-  private static class TestGlobWithSymlinksToSymlinks
-      implements FSTestWrapperGlobTest {
+  private static class TestGlobWithSymlinksToSymlinks implements
+      FSTestWrapperGlobTest {
     public void run(FSTestWrapper wrap, FileSystem fs, FileContext fc)
         throws Exception {
       // Test that globbing through a symlink to a symlink to a directory
       // fully resolves
-      wrap.mkdir(new Path("/alpha"), FsPermission.getDirDefault(), false);
-      wrap.createSymlink(new Path("/alpha"), new Path("/alphaLink"), false);
-      wrap.createSymlink(new Path("/alphaLink"),
-          new Path("/alphaLinkLink"), false);
-      wrap.mkdir(new Path("/alpha/beta"), FsPermission.getDirDefault(), false);
+      wrap.mkdir(new Path(USER_DIR + "/alpha"), FsPermission.getDirDefault(),
+          false);
+      wrap.createSymlink(new Path(USER_DIR + "/alpha"), new Path(USER_DIR
+          + "/alphaLink"), false);
+      wrap.createSymlink(new Path(USER_DIR + "/alphaLink"), new Path(USER_DIR
+          + "/alphaLinkLink"), false);
+      wrap.mkdir(new Path(USER_DIR + "/alpha/beta"),
+          FsPermission.getDirDefault(), false);
       // Test glob through symlink to a symlink to a directory
-      FileStatus statuses[] =
-          wrap.globStatus(new Path("/alphaLinkLink"), new AcceptAllPathFilter());
+      FileStatus statuses[] = wrap.globStatus(new Path(USER_DIR
+          + "/alphaLinkLink"), new AcceptAllPathFilter());
       Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alphaLinkLink",
-          statuses[0].getPath().toUri().getPath());
-      statuses =
-          wrap.globStatus(new Path("/alphaLinkLink/*"), new AcceptAllPathFilter());
+      Assert.assertEquals(USER_DIR + "/alphaLinkLink", statuses[0].getPath()
+          .toUri().getPath());
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alphaLinkLink/*"),
+          new AcceptAllPathFilter());
       Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alphaLinkLink/beta",
-          statuses[0].getPath().toUri().getPath());
+      Assert.assertEquals(USER_DIR + "/alphaLinkLink/beta", statuses[0]
+          .getPath().toUri().getPath());
       // Test glob of dangling symlink (theta does not actually exist)
-      wrap.createSymlink(new Path("theta"), new Path("/alpha/kappa"), false);
-      statuses = wrap.globStatus(new Path("/alpha/kappa/kappa"),
-          new AcceptAllPathFilter());
+      wrap.createSymlink(new Path(USER_DIR + "theta"), new Path(USER_DIR
+          + "/alpha/kappa"), false);
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alpha/kappa/kappa"),
+          new AcceptAllPathFilter());
       Assert.assertNull(statuses);
       // Test glob of symlinks
-      wrap.createFile("/alpha/beta/gamma");
-      wrap.createSymlink(new Path("gamma"),
-          new Path("/alpha/beta/gammaLink"), false);
-      wrap.createSymlink(new Path("gammaLink"),
-          new Path("/alpha/beta/gammaLinkLink"), false);
-      wrap.createSymlink(new Path("gammaLinkLink"),
-          new Path("/alpha/beta/gammaLinkLinkLink"), false);
-      statuses = wrap.globStatus(new Path("/alpha/*/gammaLinkLinkLink"),
-          new AcceptAllPathFilter());
+      wrap.createFile(USER_DIR + "/alpha/beta/gamma");
+      wrap.createSymlink(new Path(USER_DIR + "gamma"), new Path(USER_DIR
+          + "/alpha/beta/gammaLink"), false);
+      wrap.createSymlink(new Path(USER_DIR + "gammaLink"), new Path(USER_DIR
+          + "/alpha/beta/gammaLinkLink"), false);
+      wrap.createSymlink(new Path(USER_DIR + "gammaLinkLink"), new Path(
+          USER_DIR + "/alpha/beta/gammaLinkLinkLink"), false);
+      statuses = wrap.globStatus(new Path(USER_DIR
+          + "/alpha/*/gammaLinkLinkLink"), new AcceptAllPathFilter());
       Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alpha/beta/gammaLinkLinkLink",
+      Assert.assertEquals(USER_DIR + "/alpha/beta/gammaLinkLinkLink",
           statuses[0].getPath().toUri().getPath());
-      statuses = wrap.globStatus(new Path("/alpha/beta/*"),
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alpha/beta/*"),
           new AcceptAllPathFilter());
-      Assert.assertEquals("/alpha/beta/gamma;/alpha/beta/gammaLink;" +
-          "/alpha/beta/gammaLinkLink;/alpha/beta/gammaLinkLinkLink",
+      Assert.assertEquals(USER_DIR + "/alpha/beta/gamma;" + USER_DIR
+          + "/alpha/beta/gammaLink;" + USER_DIR + "/alpha/beta/gammaLinkLink;"
+          + USER_DIR + "/alpha/beta/gammaLinkLinkLink",
           TestPath.mergeStatuses(statuses));
       // Let's create two symlinks that point to each other, and glob on them.
-      wrap.createSymlink(new Path("tweedledee"),
-          new Path("/tweedledum"), false);
-      wrap.createSymlink(new Path("tweedledum"),
-          new Path("/tweedledee"), false);
-      statuses = wrap.globStatus(new Path("/tweedledee/unobtainium"),
-          new AcceptAllPathFilter());
+      wrap.createSymlink(new Path(USER_DIR + "tweedledee"), new Path(USER_DIR
+          + "/tweedledum"), false);
+      wrap.createSymlink(new Path(USER_DIR + "tweedledum"), new Path(USER_DIR
+          + "/tweedledee"), false);
+      statuses = wrap.globStatus(
+          new Path(USER_DIR + "/tweedledee/unobtainium"),
+          new AcceptAllPathFilter());
       Assert.assertNull(statuses);
     }
   }
@@ -968,34 +959,39 @@ public class TestGlobPaths {
   /**
    * Test globbing symlinks with a custom PathFilter
    */
-  private static class TestGlobSymlinksWithCustomPathFilter
-      implements FSTestWrapperGlobTest {
+  private static class TestGlobSymlinksWithCustomPathFilter implements
+      FSTestWrapperGlobTest {
     public void run(FSTestWrapper wrap, FileSystem fs, FileContext fc)
         throws Exception {
       // Test that globbing through a symlink to a symlink to a directory
       // fully resolves
-      wrap.mkdir(new Path("/alpha"), FsPermission.getDirDefault(), false);
-      wrap.createSymlink(new Path("/alpha"), new Path("/alphaLinkz"), false);
-      wrap.mkdir(new Path("/alpha/beta"), FsPermission.getDirDefault(), false);
-      wrap.mkdir(new Path("/alpha/betaz"), FsPermission.getDirDefault(), false);
-      // Test glob through symlink to a symlink to a directory, with a PathFilter
-      FileStatus statuses[] =
-          wrap.globStatus(new Path("/alpha/beta"), new AcceptPathsEndingInZ());
+      wrap.mkdir(new Path(USER_DIR + "/alpha"), FsPermission.getDirDefault(),
+          false);
+      wrap.createSymlink(new Path(USER_DIR + "/alpha"), new Path(USER_DIR
+          + "/alphaLinkz"), false);
+      wrap.mkdir(new Path(USER_DIR + "/alpha/beta"),
+          FsPermission.getDirDefault(), false);
+      wrap.mkdir(new Path(USER_DIR + "/alpha/betaz"),
+          FsPermission.getDirDefault(), false);
+      // Test glob through symlink to a symlink to a directory, with a
+      // PathFilter
+      FileStatus statuses[] = wrap.globStatus(
+          new Path(USER_DIR + "/alpha/beta"), new AcceptPathsEndingInZ());
       Assert.assertNull(statuses);
-      statuses =
-          wrap.globStatus(new Path("/alphaLinkz/betaz"), new AcceptPathsEndingInZ());
+      statuses = wrap.globStatus(new Path(USER_DIR + "/alphaLinkz/betaz"),
+          new AcceptPathsEndingInZ());
       Assert.assertEquals(1, statuses.length);
-      Assert.assertEquals("/alphaLinkz/betaz",
-          statuses[0].getPath().toUri().getPath());
-      statuses =
-          wrap.globStatus(new Path("/*/*"), new AcceptPathsEndingInZ());
-      Assert.assertEquals("/alpha/betaz;/alphaLinkz/betaz",
-          TestPath.mergeStatuses(statuses));
-      statuses =
-          wrap.globStatus(new Path("/*/*"), new AcceptAllPathFilter());
-      Assert.assertEquals("/alpha/beta;/alpha/betaz;" +
-          "/alphaLinkz/beta;/alphaLinkz/betaz",
-          TestPath.mergeStatuses(statuses));
+      Assert.assertEquals(USER_DIR + "/alphaLinkz/betaz", statuses[0].getPath()
+          .toUri().getPath());
+      statuses = wrap.globStatus(new Path(USER_DIR + "/*/*"),
+          new AcceptPathsEndingInZ());
+      Assert.assertEquals(USER_DIR + "/alpha/betaz;" + USER_DIR
+          + "/alphaLinkz/betaz", TestPath.mergeStatuses(statuses));
+      statuses = wrap.globStatus(new Path(USER_DIR + "/*/*"),
+          new AcceptAllPathFilter());
+      Assert.assertEquals(USER_DIR + "/alpha/beta;" + USER_DIR
+          + "/alpha/betaz;" + USER_DIR + "/alphaLinkz/beta;" + USER_DIR
+          + "/alphaLinkz/betaz", TestPath.mergeStatuses(statuses));
     }
   }

|
||||||
/**
|
/**
|
||||||
* Test that globStatus fills in the scheme even when it is not provided.
|
* Test that globStatus fills in the scheme even when it is not provided.
|
||||||
*/
|
*/
|
||||||
private static class TestGlobFillsInScheme
|
private static class TestGlobFillsInScheme implements FSTestWrapperGlobTest {
|
||||||
implements FSTestWrapperGlobTest {
|
public void run(FSTestWrapper wrap, FileSystem fs, FileContext fc)
|
||||||
public void run(FSTestWrapper wrap, FileSystem fs, FileContext fc)
|
|
||||||
throws Exception {
|
throws Exception {
|
||||||
// Verify that the default scheme is hdfs, when we don't supply one.
|
// Verify that the default scheme is hdfs, when we don't supply one.
|
||||||
wrap.mkdir(new Path("/alpha"), FsPermission.getDirDefault(), false);
|
wrap.mkdir(new Path(USER_DIR + "/alpha"), FsPermission.getDirDefault(),
|
||||||
wrap.createSymlink(new Path("/alpha"), new Path("/alphaLink"), false);
|
false);
|
||||||
FileStatus statuses[] =
|
wrap.createSymlink(new Path(USER_DIR + "/alpha"), new Path(USER_DIR
|
||||||
wrap.globStatus(new Path("/alphaLink"), new AcceptAllPathFilter());
|
+ "/alphaLink"), false);
|
||||||
|
FileStatus statuses[] = wrap.globStatus(
|
||||||
|
new Path(USER_DIR + "/alphaLink"), new AcceptAllPathFilter());
|
||||||
Assert.assertEquals(1, statuses.length);
|
Assert.assertEquals(1, statuses.length);
|
||||||
Path path = statuses[0].getPath();
|
Path path = statuses[0].getPath();
|
||||||
Assert.assertEquals("/alphaLink", path.toUri().getPath());
|
Assert.assertEquals(USER_DIR + "/alphaLink", path.toUri().getPath());
|
||||||
Assert.assertEquals("hdfs", path.toUri().getScheme());
|
Assert.assertEquals("hdfs", path.toUri().getScheme());
|
||||||
if (fc != null) {
|
if (fc != null) {
|
||||||
// If we're using FileContext, then we can list a file:/// URI.
|
// If we're using FileContext, then we can list a file:/// URI.
|
||||||
// Since everyone should have the root directory, we list that.
|
// Since everyone should have the root directory, we list that.
|
||||||
statuses =
|
statuses = wrap.globStatus(new Path("file:///"),
|
||||||
wrap.globStatus(new Path("file:///"), new AcceptAllPathFilter());
|
new AcceptAllPathFilter());
|
||||||
Assert.assertEquals(1, statuses.length);
|
Assert.assertEquals(1, statuses.length);
|
||||||
Path filePath = statuses[0].getPath();
|
Path filePath = statuses[0].getPath();
|
||||||
Assert.assertEquals("file", filePath.toUri().getScheme());
|
Assert.assertEquals("file", filePath.toUri().getScheme());
|
||||||
|
@ -1050,4 +1047,43 @@ public class TestGlobPaths {
|
||||||
public void testGlobFillsInSchemeOnFC() throws Exception {
|
public void testGlobFillsInSchemeOnFC() throws Exception {
|
||||||
testOnFileContext(new TestGlobFillsInScheme());
|
testOnFileContext(new TestGlobFillsInScheme());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test that globStatus works with relative paths.
|
||||||
|
**/
|
||||||
|
private static class TestRelativePath implements FSTestWrapperGlobTest {
|
||||||
|
public void run(FSTestWrapper wrap, FileSystem fs, FileContext fc)
|
||||||
|
throws Exception {
|
||||||
|
String[] files = new String[] { "a", "abc", "abc.p", "bacd" };
|
||||||
|
|
||||||
|
Path[] path = new Path[files.length];
|
||||||
|
for(int i=0; i < files.length; i++) {
|
||||||
|
path[i] = wrap.makeQualified(new Path(files[i]));
|
||||||
|
wrap.mkdir(path[i], FsPermission.getDirDefault(), true);
|
||||||
|
}
|
||||||
|
|
||||||
|
Path patternPath = new Path("a*");
|
||||||
|
Path[] globResults = FileUtil.stat2Paths(wrap.globStatus(patternPath,
|
||||||
|
new AcceptAllPathFilter()),
|
||||||
|
patternPath);
|
||||||
|
|
||||||
|
for(int i=0; i < globResults.length; i++) {
|
||||||
|
globResults[i] = wrap.makeQualified(globResults[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
assertEquals(globResults.length, 3);
|
||||||
|
assertEquals(USER_DIR + "/a;" + USER_DIR + "/abc;" + USER_DIR + "/abc.p",
|
||||||
|
TestPath.mergeStatuses(globResults));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRelativePathOnFS() throws Exception {
|
||||||
|
testOnFileSystem(new TestRelativePath());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRelativePathOnFC() throws Exception {
|
||||||
|
testOnFileContext(new TestRelativePath());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
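The new TestRelativePath case above globs a pattern with no leading slash, so matches resolve against the caller's working or home directory, and FileUtil.stat2Paths turns the FileStatus results back into paths. A short usage sketch against the FileSystem API; the pattern and the Configuration defaults here are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    // Sketch: glob a relative pattern and convert the statuses to paths.
    public class RelativeGlobSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // "a*" is relative, so it is resolved against fs.getWorkingDirectory().
        FileStatus[] statuses = fs.globStatus(new Path("a*"));
        Path[] matches = FileUtil.stat2Paths(statuses, new Path("a*"));
        for (Path p : matches) {
          System.out.println(p);
        }
      }
    }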
Some files were not shown because too many files have changed in this diff.