diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 9d73cfd4bcb..08a537858be 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -49,6 +49,9 @@ Trunk (unreleased changes) HADOOP-8108. Move method getHostPortString() from NameNode to NetUtils. (Brandon Li via jitendra) + HADOOP-8078. Add capability to turn on security in unit tests. (Jaimin Jetly + via jitendra) + BUG FIXES HADOOP-8018. Hudson auto test for HDFS has started throwing javadoc @@ -151,6 +154,9 @@ Release 0.23.3 - UNRELEASED HADOOP-8085. Add RPC metrics to ProtobufRpcEngine. (Hari Mankude via suresh) + HADOOP-8098. KerberosAuthenticatorHandler should use _HOST replacement to + resolve principal name (tucu) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 8438db1724a..fd18b607a25 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -31,11 +31,12 @@ /usr/local ${snappy.prefix}/lib false - + src/test/resources/kdc common true + org.apache.hadoop @@ -92,7 +93,6 @@ jetty-util compile - asm asm @@ -113,7 +113,6 @@ jersey-server compile - tomcat jasper-compiler @@ -300,6 +299,16 @@ + + org.apache.maven.plugins + maven-surefire-plugin + + + ${startKdc} + ${kdc.resource.dir} + + + org.apache.avro avro-maven-plugin @@ -649,5 +658,87 @@ + + + + startKdc + + + startKdc + true + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-os + + enforce + + + + + + mac + unix + + + true + + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + compile + compile + + run + + + + + + + + + + + + + + + + + + + + + + killKdc + test + + run + + + + + + + + + + + + + + diff --git a/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml b/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml index ad98c112f76..270e9517ed2 100644 --- a/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml +++ b/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml @@ -111,7 +111,8 @@

hadoop.http.authentication.kerberos.principal: Indicates the Kerberos principal to be used for HTTP endpoint when using 'kerberos' authentication. The principal short name must be HTTP per Kerberos HTTP SPNEGO specification.
-  The default value is HTTP/localhost@$LOCALHOST.
+  The default value is HTTP/_HOST@$LOCALHOST, where _HOST, if present,
+  is replaced with the bind address of the HTTP server.

hadoop.http.authentication.kerberos.keytab: Location of the keytab file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java index 571a296fc9e..979ec7656e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java @@ -100,6 +100,8 @@ public class HttpServer implements FilterContainer { public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf"; static final String ADMINS_ACL = "admins.acl"; + public static final String BIND_ADDRESS = "bind.address"; + private AccessControlList adminsAcl; protected final Server webServer; @@ -243,6 +245,8 @@ public class HttpServer implements FilterContainer { addGlobalFilter("safety", QuotingInputFilter.class.getName(), null); final FilterInitializer[] initializers = getFilterInitializers(conf); if (initializers != null) { + conf = new Configuration(conf); + conf.set(BIND_ADDRESS, bindAddress); for(FilterInitializer c : initializers) { c.initFilter(this, conf); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java index 37fc3be05c9..1509d247fa7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java @@ -17,10 +17,12 @@ */ package org.apache.hadoop.security; +import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import java.io.FileReader; import java.io.IOException; @@ -46,7 +48,7 @@ public class AuthenticationFilterInitializer extends FilterInitializer { static final String PREFIX = "hadoop.http.authentication."; static final String SIGNATURE_SECRET_FILE = AuthenticationFilter.SIGNATURE_SECRET + ".file"; - + /** * Initializes hadoop-auth AuthenticationFilter. *

@@ -90,7 +92,20 @@ public class AuthenticationFilterInitializer extends FilterInitializer { } catch (IOException ex) { throw new RuntimeException("Could not read HTTP signature secret file: " + signatureSecretFile); } - + + //Resolve _HOST into bind address + String bindAddress = conf.get(HttpServer.BIND_ADDRESS); + String principal = filterConfig.get(KerberosAuthenticationHandler.PRINCIPAL); + if (principal != null) { + try { + principal = SecurityUtil.getServerPrincipal(principal, bindAddress); + } + catch (IOException ex) { + throw new RuntimeException("Could not resolve Kerberos principal name: " + ex.toString(), ex); + } + filterConfig.put(KerberosAuthenticationHandler.PRINCIPAL, principal); + } + container.addFilter("authentication", AuthenticationFilter.class.getName(), filterConfig); diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml index 9390d1093ff..a9684000b61 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml @@ -841,7 +841,7 @@ hadoop.http.authentication.kerberos.principal - HTTP/localhost@LOCALHOST + HTTP/_HOST@LOCALHOST Indicates the Kerberos principal to be used for HTTP endpoint. The principal MUST start with 'HTTP/' as per Kerberos HTTP SPNEGO specification. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java index 2d699ddcf1f..3c12047be21 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java @@ -18,9 +18,11 @@ package org.apache.hadoop.security; import junit.framework.TestCase; +import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -48,6 +50,8 @@ public class TestAuthenticationFilter extends TestCase { AuthenticationFilterInitializer.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath()); + conf.set(HttpServer.BIND_ADDRESS, "barhost"); + FilterContainer container = Mockito.mock(FilterContainer.class); Mockito.doAnswer( new Answer() { @@ -67,7 +71,7 @@ public class TestAuthenticationFilter extends TestCase { assertEquals("hadoop", conf.get("signature.secret")); assertNull(conf.get("cookie.domain")); assertEquals("true", conf.get("simple.anonymous.allowed")); - assertEquals("HTTP/localhost@LOCALHOST", + assertEquals("HTTP/barhost@LOCALHOST", conf.get("kerberos.principal")); assertEquals(System.getProperty("user.home") + "/hadoop.keytab", conf.get("kerberos.keytab")); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithSecurityOn.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithSecurityOn.java new file mode 100644 index 00000000000..3dc69783df9 --- /dev/null +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithSecurityOn.java @@ -0,0 +1,77 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.security; + +import java.io.IOException; + +import junit.framework.Assert; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; +import org.junit.Assume; +import org.junit.Before; +import org.junit.Test; + +public class TestUGIWithSecurityOn { + + public static boolean isKdcRunning() { + String startKdc = System.getProperty("startKdc"); + if(startKdc == null || !startKdc.equals("true")) { + return false; + } + return true; + } + + @Before + public void testKdcRunning() { + //Tests are skipped if KDC is not running + Assume.assumeTrue(isKdcRunning()); + } + @Test + public void testLogin() throws IOException { + String nn1keyTabFilepath = System.getProperty("kdc.resource.dir") + + "/keytabs/nn1.keytab"; + String user1keyTabFilepath = System.getProperty("kdc.resource.dir") + + "/keytabs/user1.keytab"; + Configuration conf = new Configuration(); + conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + UserGroupInformation.setConfiguration(conf); + + UserGroupInformation ugiNn = UserGroupInformation + .loginUserFromKeytabAndReturnUGI("nn1/localhost@EXAMPLE.COM", + nn1keyTabFilepath); + UserGroupInformation ugiDn = UserGroupInformation + .loginUserFromKeytabAndReturnUGI("user1@EXAMPLE.COM", + user1keyTabFilepath); + + Assert.assertEquals(AuthenticationMethod.KERBEROS, + ugiNn.getAuthenticationMethod()); + Assert.assertEquals(AuthenticationMethod.KERBEROS, + ugiDn.getAuthenticationMethod()); + + try { + UserGroupInformation + .loginUserFromKeytabAndReturnUGI("bogus@EXAMPLE.COM", + nn1keyTabFilepath); + Assert.fail("Login should have failed"); + } catch (Exception ex) { + ex.printStackTrace(); + } + } +} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java index 036395ea7f5..39ff6808fee 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java @@ -384,10 +384,10 @@ public class TestAccessControlList { assertTrue(acl.isAllAllowed()); UserGroupInformation drwho = - UserGroupInformation.createUserForTesting("drwho@APACHE.ORG", + UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM", new String[] { "aliens" }); 
UserGroupInformation drwho2 = - UserGroupInformation.createUserForTesting("drwho2@APACHE.ORG", + UserGroupInformation.createUserForTesting("drwho2@EXAMPLE.COM", new String[] { "tardis" }); acl.addUser("drwho"); @@ -413,16 +413,16 @@ public class TestAccessControlList { AccessControlList acl; UserGroupInformation drwho = - UserGroupInformation.createUserForTesting("drwho@APACHE.ORG", + UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM", new String[] { "aliens", "humanoids", "timelord" }); UserGroupInformation susan = - UserGroupInformation.createUserForTesting("susan@APACHE.ORG", + UserGroupInformation.createUserForTesting("susan@EXAMPLE.COM", new String[] { "aliens", "humanoids", "timelord" }); UserGroupInformation barbara = - UserGroupInformation.createUserForTesting("barbara@APACHE.ORG", + UserGroupInformation.createUserForTesting("barbara@EXAMPLE.COM", new String[] { "humans", "teachers" }); UserGroupInformation ian = - UserGroupInformation.createUserForTesting("ian@APACHE.ORG", + UserGroupInformation.createUserForTesting("ian@EXAMPLE.COM", new String[] { "humans", "teachers" }); acl = new AccessControlList("drwho humanoids"); diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/dn1.keytab b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/dn1.keytab new file mode 100644 index 00000000000..d503bb299e6 Binary files /dev/null and b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/dn1.keytab differ diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/nn1.keytab b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/nn1.keytab new file mode 100644 index 00000000000..40f418f1b69 Binary files /dev/null and b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/nn1.keytab differ diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/user1.keytab b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/user1.keytab new file mode 100644 index 00000000000..b8994eca210 Binary files /dev/null and b/hadoop-common-project/hadoop-common/src/test/resources/kdc/keytabs/user1.keytab differ diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/killKdc.sh b/hadoop-common-project/hadoop-common/src/test/resources/kdc/killKdc.sh new file mode 100644 index 00000000000..f5561d4b787 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/resources/kdc/killKdc.sh @@ -0,0 +1,3 @@ +#!/bin/sh +ps -ef | grep apacheds | grep -v grep | cut -f4 -d ' ' |xargs kill -9 + diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/ldif/users.ldif b/hadoop-common-project/hadoop-common/src/test/resources/kdc/ldif/users.ldif new file mode 100644 index 00000000000..a3d2704949c --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/resources/kdc/ldif/users.ldif @@ -0,0 +1,78 @@ +dn: dc=example,dc=com +objectClass: dcObject +objectClass: organization +objectClass: top +dc: example +o: example.com + +dn: ou=Users,dc=example,dc=com +objectClass: organizationalUnit +objectClass: top +ou: Users + +dn: uid=user1,ou=Users,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +objectClass: krb5principal +objectClass: krb5kdcentry +cn: user1 Service +sn: Service +uid: user1 +userPassword: secret +krb5PrincipalName: user1@EXAMPLE.COM +krb5KeyVersionNumber: 0 + +dn: uid=krbtgt,ou=Users,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +objectClass: krb5principal 
+objectClass: krb5kdcentry +cn: KDC Service +sn: Service +uid: krbtgt +userPassword: secret +krb5PrincipalName: krbtgt/EXAMPLE.COM@EXAMPLE.COM +krb5KeyVersionNumber: 0 + +dn: uid=ldap,ou=Users,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +objectClass: krb5principal +objectClass: krb5kdcentry +cn: LDAP +sn: Service +uid: ldap +userPassword: randall +krb5PrincipalName: ldap/localhost@EXAMPLE.COM +krb5KeyVersionNumber: 0 + +dn: uid=nn1,ou=Users,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +objectClass: krb5principal +objectClass: krb5kdcentry +cn: NameNode Service +sn: Service +uid: nn1 +userPassword: secret +krb5PrincipalName: nn1/localhost@EXAMPLE.COM +krb5KeyVersionNumber: 0 + +dn: uid=dn1,ou=Users,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +objectClass: krb5principal +objectClass: krb5kdcentry +cn: DataNode Service +sn: Service +uid: dn1 +userPassword: secret +krb5PrincipalName: dn1/localhost@EXAMPLE.COM +krb5KeyVersionNumber: 0 + + diff --git a/hadoop-common-project/hadoop-common/src/test/resources/kdc/server.xml b/hadoop-common-project/hadoop-common/src/test/resources/kdc/server.xml new file mode 100644 index 00000000000..bb8c52a9976 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/resources/kdc/server.xml @@ -0,0 +1,258 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + #directoryService + + + + + + + + + + + + + + + + + + #directoryService + + + + + + + + + + + + + + example.com + apache.org + + + + + + + + + + + + + + #ldapServer + + + + + diff --git a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf index 121ac6d9b98..3182436d487 100644 --- a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf +++ b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf @@ -14,15 +14,23 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# +# + [libdefaults] - default_realm = APACHE.ORG - udp_preference_limit = 1 - extra_addresses = 127.0.0.1 + default_realm = EXAMPLE.COM + allow_weak_crypto = true + default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 + default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 + [realms] - APACHE.ORG = { - admin_server = localhost:88 - kdc = localhost:88 - } + EXAMPLE.COM = { + kdc = localhost:60088 + } + [domain_realm] - localhost = APACHE.ORG + .example.com = EXAMPLE.COM + example.com = EXAMPLE.COM +[login] + krb4_convert = true + krb4_get_tickets = false + diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 0d586cfc1e1..e64aa999ee5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -201,6 +201,8 @@ Release 0.23.3 - UNRELEASED OPTIMIZATIONS + HDFS-3024. Improve performance of stringification in addStoredBlock (todd) + BUG FIXES HDFS-2481. Unknown protocol: org.apache.hadoop.hdfs.protocol.ClientProtocol. 
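
Note on the _HOST handling added to AuthenticationFilterInitializer earlier in this patch: the substitution is delegated to SecurityUtil.getServerPrincipal, which needs no running KDC. The following is a minimal sketch of how that call behaves; the principal, realm, and hostname values are illustrative only and are not taken from the patch.

import java.io.IOException;

import org.apache.hadoop.security.SecurityUtil;

public class ResolvePrincipalExample {
  public static void main(String[] args) throws IOException {
    // Illustrative values: a SPNEGO principal using the _HOST placeholder
    // and the bind address the HTTP server was started on.
    String configuredPrincipal = "HTTP/_HOST@EXAMPLE.COM";
    String bindAddress = "host1.example.com";

    // _HOST is replaced with the host name; a principal without the
    // _HOST placeholder is returned unchanged.
    String resolved = SecurityUtil.getServerPrincipal(configuredPrincipal, bindAddress);
    System.out.println(resolved); // typically HTTP/host1.example.com@EXAMPLE.COM
  }
}
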
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/Block.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/Block.java index f6bf6b0b715..33c86f96d89 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/Block.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/Block.java @@ -150,6 +150,14 @@ public class Block implements Writable, Comparable { return getBlockName() + "_" + getGenerationStamp(); } + public void appendStringTo(StringBuilder sb) { + sb.append(BLOCK_FILE_PREFIX) + .append(blockId) + .append("_") + .append(getGenerationStamp()); + } + + ///////////////////////////////////// // Writable ///////////////////////////////////// diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java index beeb2c8fa21..6509f3d7fa9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement; import java.io.IOException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import org.apache.hadoop.hdfs.protocol.Block; @@ -114,14 +115,19 @@ public class BlockInfoUnderConstruction extends BlockInfo { @Override public String toString() { - final StringBuilder b = new StringBuilder(getClass().getSimpleName()); - b.append("[") - .append(expectedLocation) - .append("|") - .append(state) - .append("]"); + final StringBuilder b = new StringBuilder(50); + appendStringTo(b); return b.toString(); } + + @Override + public void appendStringTo(StringBuilder sb) { + sb.append("ReplicaUnderConstruction[") + .append(expectedLocation) + .append("|") + .append(state) + .append("]"); + } } /** @@ -269,11 +275,29 @@ public class BlockInfoUnderConstruction extends BlockInfo { @Override public String toString() { - final StringBuilder b = new StringBuilder(super.toString()); - b.append("{blockUCState=").append(blockUCState) - .append(", primaryNodeIndex=").append(primaryNodeIndex) - .append(", replicas=").append(replicas) - .append("}"); + final StringBuilder b = new StringBuilder(100); + appendStringTo(b); return b.toString(); } + + @Override + public void appendStringTo(StringBuilder sb) { + super.appendStringTo(sb); + appendUCParts(sb); + } + + private void appendUCParts(StringBuilder sb) { + sb.append("{blockUCState=").append(blockUCState) + .append(", primaryNodeIndex=").append(primaryNodeIndex) + .append(", replicas=["); + Iterator iter = replicas.iterator(); + if (iter.hasNext()) { + iter.next().appendStringTo(sb); + while (iter.hasNext()) { + sb.append(", "); + iter.next().appendStringTo(sb); + } + } + sb.append("]}"); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 806a3c5e0cb..9a76f1e43d7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -1967,9 +1967,7 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block if (added) { curReplicaDelta = 1; if (logEveryBlock) { - NameNode.stateChangeLog.info("BLOCK* addStoredBlock: " - + "blockMap updated: " + node.getName() + " is added to " + - storedBlock + " size " + storedBlock.getNumBytes()); + logAddStoredBlock(storedBlock, node); } } else { curReplicaDelta = 0; @@ -2031,6 +2029,20 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block return storedBlock; } + private void logAddStoredBlock(BlockInfo storedBlock, DatanodeDescriptor node) { + if (!NameNode.stateChangeLog.isInfoEnabled()) { + return; + } + + StringBuilder sb = new StringBuilder(500); + sb.append("BLOCK* addStoredBlock: blockMap updated: ") + .append(node.getName()) + .append(" is added to "); + storedBlock.appendStringTo(sb); + sb.append(" size " ) + .append(storedBlock.getNumBytes()); + NameNode.stateChangeLog.info(sb); + } /** * Invalidate corrupt replicas. *

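The HDFS-3024 changes above all follow one pattern: rather than building intermediate strings through toString() and concatenation on the block-report hot path, each object appends itself into a caller-supplied StringBuilder, and the log statement is guarded by an isInfoEnabled() check. Below is a small, self-contained sketch of that pattern; the class, field, and method bodies are illustrative stand-ins, not the actual Hadoop classes, and it assumes commons-logging (the logging API Hadoop uses here) is on the classpath.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

class SimpleBlock {
  private final long blockId;
  private final long generationStamp;

  SimpleBlock(long blockId, long generationStamp) {
    this.blockId = blockId;
    this.generationStamp = generationStamp;
  }

  // Append this block's description into an existing builder instead of
  // allocating a new String on every toString() call.
  void appendStringTo(StringBuilder sb) {
    sb.append("blk_").append(blockId).append("_").append(generationStamp);
  }
}

public class AppendStringToExample {
  private static final Log LOG = LogFactory.getLog(AppendStringToExample.class);

  static void logAdded(SimpleBlock block, String nodeName, long numBytes) {
    if (!LOG.isInfoEnabled()) {
      return; // skip all string building when INFO logging is disabled
    }
    StringBuilder sb = new StringBuilder(128);
    sb.append("blockMap updated: ").append(nodeName).append(" is added to ");
    block.appendStringTo(sb);
    sb.append(" size ").append(numBytes);
    LOG.info(sb);
  }

  public static void main(String[] args) {
    logAdded(new SimpleBlock(42L, 1001L), "datanode1:50010", 134217728L);
  }
}
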
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index ef661b3f7fa..fd3064752df 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -194,6 +194,12 @@ Release 0.23.2 - UNRELEASED MAPREDUCE-3913. RM application webpage is unresponsive after 2000 jobs (Jason Lowe via tgraves) + MAPREDUCE-3922. Fixed build to not compile 32bit container-executor binary + by default on all platforms. (Hitesh Shah via vinodkv) + + MAPREDUCE-3790 Broken pipe on streaming job can lead to truncated output for + a successful job (Jason Lowe via bobby) + Release 0.23.1 - 2012-02-17 INCOMPATIBLE CHANGES diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml index f3c2ab6fab9..1272dde76fa 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml @@ -28,6 +28,7 @@ ${project.parent.parent.basedir} /etc/hadoop + @@ -74,7 +75,7 @@ CFLAGS - -DHADOOP_CONF_DIR=${container-executor.conf.dir} -m32 + -DHADOOP_CONF_DIR=${container-executor.conf.dir} ${container-executor.additional_cflags} ${project.build.directory}/native/container-executor diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java index 0fe9dbd0861..c8d099ec9a7 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java @@ -521,11 +521,15 @@ public abstract class PipeMapRed { LOG.info("mapRedFinished"); return; } - try { - if (clientOut_ != null) { + if (clientOut_ != null) { + try { clientOut_.flush(); clientOut_.close(); + } catch (IOException io) { + LOG.warn(io); } + } + try { waitOutputThreads(); } catch (IOException io) { LOG.warn(io); diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/OutputOnlyApp.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/OutputOnlyApp.java new file mode 100644 index 00000000000..0a659ce187a --- /dev/null +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/OutputOnlyApp.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.streaming; + +import java.io.IOException; + +/** + * An application that outputs a specified number of lines + * without consuming any input. 
+ */ +public class OutputOnlyApp { + public static void main(String[] args) throws IOException { + if (args.length < 1) { + System.err.println("Usage: OutputOnlyApp NUMRECORDS"); + return; + } + int numRecords = Integer.parseInt(args[0]); + while (numRecords-- > 0) { + System.out.println("key\tvalue"); + } + } +} diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java new file mode 100644 index 00000000000..bd50ae05429 --- /dev/null +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java @@ -0,0 +1,107 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.streaming; + +import static org.junit.Assert.*; + +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; + +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hdfs.HdfsConfiguration; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.junit.Test; + +public class TestUnconsumedInput { + protected final int EXPECTED_OUTPUT_SIZE = 10000; + protected File INPUT_FILE = new File("stream_uncinput_input.txt"); + protected File OUTPUT_DIR = new File("stream_uncinput_out"); + // map parses input lines and generates count entries for each word. 
+ protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n"; + protected String map = UtilTest.makeJavaCommand(OutputOnlyApp.class, + new String[]{Integer.toString(EXPECTED_OUTPUT_SIZE)}); + + private StreamJob job; + + public TestUnconsumedInput() throws IOException + { + UtilTest utilTest = new UtilTest(getClass().getName()); + utilTest.checkUserDir(); + utilTest.redirectIfAntJunit(); + } + + protected void createInput() throws IOException + { + DataOutputStream out = new DataOutputStream( + new FileOutputStream(INPUT_FILE.getAbsoluteFile())); + for (int i=0; i<10000; ++i) { + out.write(input.getBytes("UTF-8")); + } + out.close(); + } + + protected String[] genArgs() { + return new String[] { + "-input", INPUT_FILE.getAbsolutePath(), + "-output", OUTPUT_DIR.getAbsolutePath(), + "-mapper", map, + "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer", + "-numReduceTasks", "0", + "-jobconf", "mapreduce.task.files.preserve.failedtasks=true", + "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp") + }; + } + + @Test + public void testUnconsumedInput() throws Exception + { + String outFileName = "part-00000"; + File outFile = null; + try { + try { + FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile()); + } catch (Exception e) { + } + + createInput(); + + // setup config to ignore unconsumed input + Configuration conf = new Configuration(); + conf.set("stream.minRecWrittenToEnableSkip_", "0"); + + job = new StreamJob(); + job.setConf(conf); + int exitCode = job.run(genArgs()); + assertEquals("Job failed", 0, exitCode); + outFile = new File(OUTPUT_DIR, outFileName).getAbsoluteFile(); + String output = StreamUtil.slurp(outFile); + assertEquals("Output was truncated", EXPECTED_OUTPUT_SIZE, + StringUtils.countMatches(output, "\t")); + } finally { + INPUT_FILE.delete(); + FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile()); + } + } +}
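
The PipeMapRed change above moves the flush/close of the stream feeding the streaming subprocess into its own try/catch, so a broken pipe while closing no longer skips waitOutputThreads(); without that, a job whose mapper never consumes its input (as exercised by TestUnconsumedInput) could succeed yet produce truncated output. The sketch below shows only the general pattern of isolating a close failure so later cleanup still runs; the stream and cleanup names are hypothetical, not the PipeMapRed members.

import java.io.IOException;
import java.io.OutputStream;

public class IsolatedCloseExample {

  // Hypothetical cleanup step that must always run, standing in for
  // the "wait for output threads" step in the real code.
  static void drainOutputs() {
    System.out.println("draining output threads");
  }

  static void finish(OutputStream toSubprocess) {
    if (toSubprocess != null) {
      try {
        toSubprocess.flush();
        toSubprocess.close();
      } catch (IOException io) {
        // A broken pipe here is expected when the child exited early
        // (for example, it never read its input); log it and keep going.
        System.err.println("ignoring close failure: " + io);
      }
    }
    // Because the close failure was caught above, the remaining cleanup
    // still runs and the output already produced is collected.
    drainOutputs();
  }

  public static void main(String[] args) {
    finish(null); // even with no stream at all, cleanup still runs
  }
}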