diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index dc917026be4..0617a5b1238 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -68,6 +68,8 @@ Release 0.23.2 - UNRELEASED
NEW FEATURES
IMPROVEMENTS
+ HADOOP-8048. Allow merging of Credentials (Daryn Sharp via tgraves)
+
HADOOP-8032. mvn site:stage-deploy should be able to use the scp protocol
to stage documents (Ravi Prakash via tgraves)
@@ -75,6 +77,8 @@ Release 0.23.2 - UNRELEASED
(szetszwo)
OPTIMIZATIONS
+ HADOOP-8071. Avoid an extra packet in client code when nagling is
+ disabled. (todd)
BUG FIXES
@@ -85,6 +89,14 @@ Release 0.23.2 - UNRELEASED
HADOOP-8035 Hadoop Maven site is inefficient and runs phases redundantly
(abayer via tucu)
+ HADOOP-8051. HttpFS documentation is not wired to the generated site (tucu)
+
+ HADOOP-8055. Hadoop tarball distribution lacks a core-site.xml (harsh)
+
+ HADOOP-8052. Hadoop Metrics2 should emit Float.MAX_VALUE (instead of
+ Double.MAX_VALUE) to avoid making Ganglia's gmetad core. (Varun Kapoor
+ via mattf)
+
Release 0.23.1 - 2012-02-08
INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/core-site.xml b/hadoop-common-project/hadoop-common/src/main/conf/core-site.xml
new file mode 100644
index 00000000000..d2ddf893e49
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/conf/core-site.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+</configuration>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index eb8e704f5f5..bc751f20f2d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -799,9 +799,12 @@ public class Client {
header.write(d);
call.rpcRequest.write(d);
byte[] data = d.getData();
- int dataLength = d.getLength();
- out.writeInt(dataLength); //first put the data length
- out.write(data, 0, dataLength);//write the data
+ int dataLength = d.getLength() - 4;
+ data[0] = (byte)((dataLength >>> 24) & 0xff);
+ data[1] = (byte)((dataLength >>> 16) & 0xff);
+ data[2] = (byte)((dataLength >>> 8) & 0xff);
+ data[3] = (byte)(dataLength & 0xff);
+ out.write(data, 0, dataLength + 4);//write the data
out.flush();
}
} catch(IOException e) {
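
Why the rewrite avoids an extra packet: with Nagle's algorithm disabled (TCP_NODELAY), each write() on the socket can go out as its own segment, so the old writeInt-then-write pair could send the 4-byte length and the payload separately. The new code patches the length directly into the first four bytes of the serialization buffer (the full commit reserves them with a d.writeInt(0) placeholder before the header is written; that part falls outside this hunk) and pushes everything to the socket in a single write. A minimal sketch of the same framing technique, with illustrative names (frameAndSend is not a Hadoop API):

    import java.io.DataOutputStream;
    import java.io.IOException;

    class LengthPrefixedFrame {
      // Patch a big-endian length into the reserved 4-byte prefix, then send
      // the whole frame with one write() so a Nagle-disabled socket emits a
      // single packet instead of two.
      static void frameAndSend(DataOutputStream out, byte[] buf) throws IOException {
        int dataLength = buf.length - 4;              // payload starts at offset 4
        buf[0] = (byte)((dataLength >>> 24) & 0xff);  // same byte order as
        buf[1] = (byte)((dataLength >>> 16) & 0xff);  // DataOutput.writeInt
        buf[2] = (byte)((dataLength >>>  8) & 0xff);
        buf[3] = (byte)( dataLength         & 0xff);
        out.write(buf, 0, dataLength + 4);            // one write, one flush
        out.flush();
      }
    }
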
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
index f154269698a..589062a691c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
@@ -143,8 +143,16 @@ public class SampleStat {
@SuppressWarnings("PublicInnerClass")
public static class MinMax {
- private double min = Double.MAX_VALUE;
- private double max = Double.MIN_VALUE;
+ // Float.MAX_VALUE is used rather than Double.MAX_VALUE, even though the
+ // min and max variables are of type double.
+ // Float.MAX_VALUE is big enough, and using Double.MAX_VALUE makes
+ // Ganglia core due to buffer overflow.
+ // The same reasoning applies to the MIN_VALUE counterparts.
+ static final double DEFAULT_MIN_VALUE = Float.MAX_VALUE;
+ static final double DEFAULT_MAX_VALUE = Float.MIN_VALUE;
+
+ private double min = DEFAULT_MIN_VALUE;
+ private double max = DEFAULT_MAX_VALUE;
public void add(double value) {
if (value > max) max = value;
@@ -155,8 +163,8 @@ public class SampleStat {
public double max() { return max; }
public void reset() {
- min = Double.MAX_VALUE;
- max = Double.MIN_VALUE;
+ min = DEFAULT_MIN_VALUE;
+ max = DEFAULT_MAX_VALUE;
}
public void reset(MinMax other) {
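
The width difference behind the Ganglia fix is easy to see: both sentinels are finite doubles, but Double.MAX_VALUE (about 1.8e308) expands to over 300 characters in fixed-point notation, while Float.MAX_VALUE (about 3.4e38) stays small. The snippet below only illustrates that difference; that gmetad renders values into a fixed-size buffer in a %f-like format is the failure mode described by HADOOP-8052, not something shown here. Note also that Float.MIN_VALUE, like the old Double.MIN_VALUE, is the smallest positive value rather than the most negative one, so an untouched stat reports max() near zero; the patch deliberately preserves that behavior.

    public class SentinelWidth {
      public static void main(String[] args) {
        // Double.MAX_VALUE in fixed notation: 309 integer digits + "." + 6 decimals.
        System.out.println(String.format("%f", Double.MAX_VALUE).length());         // 316
        // Float.MAX_VALUE: 39 integer digits + "." + 6 decimals -- far under typical buffers.
        System.out.println(String.format("%f", (double) Float.MAX_VALUE).length()); // 46
      }
    }
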
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index 152a35496b8..9883604a2f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -230,14 +230,34 @@ public class Credentials implements Writable {
/**
* Copy all of the credentials from one credential object into another.
+ * Existing secrets and tokens are overwritten.
* @param other the credentials to copy
*/
public void addAll(Credentials other) {
+ addAll(other, true);
+ }
+
+ /**
+ * Copy all of the credentials from one credential object into another.
+ * Existing secrets and tokens are not overwritten.
+ * @param other the credentials to copy
+ */
+ public void mergeAll(Credentials other) {
+ addAll(other, false);
+ }
+
+ private void addAll(Credentials other, boolean overwrite) {
    for(Map.Entry<Text, byte[]> secret: other.secretKeysMap.entrySet()) {
- secretKeysMap.put(secret.getKey(), secret.getValue());
+ Text key = secret.getKey();
+ if (!secretKeysMap.containsKey(key) || overwrite) {
+ secretKeysMap.put(key, secret.getValue());
+ }
}
    for(Map.Entry<Text, Token<? extends TokenIdentifier>> token: other.tokenMap.entrySet()){
- tokenMap.put(token.getKey(), token.getValue());
+ Text key = token.getKey();
+ if (!tokenMap.containsKey(key) || overwrite) {
+ tokenMap.put(key, token.getValue());
+ }
}
}
}
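
A minimal usage sketch of the two entry points, using only the Credentials secret-key API shown above; the aliases and values are illustrative:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;

    public class CredentialsMergeDemo {
      public static void main(String[] args) {
        Credentials base = new Credentials();
        base.addSecretKey(new Text("alias"), "original".getBytes());

        Credentials incoming = new Credentials();
        incoming.addSecretKey(new Text("alias"), "updated".getBytes());
        incoming.addSecretKey(new Text("extra"), "added".getBytes());

        base.mergeAll(incoming);  // keeps "original" for the duplicate key, adds "extra"
        System.out.println(new String(base.getSecretKey(new Text("alias")))); // original

        base.addAll(incoming);    // overwrite semantics: duplicate key now "updated"
        System.out.println(new String(base.getSecretKey(new Text("alias")))); // updated
      }
    }
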
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
index 36ca6bb1664..0fb0ad8ace9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
@@ -36,8 +36,8 @@ public class TestSampleStat {
assertEquals("mean", 0.0, stat.mean(), EPSILON);
assertEquals("variance", 0.0, stat.variance(), EPSILON);
assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
- assertEquals("min", Double.MAX_VALUE, stat.min(), EPSILON);
- assertEquals("max", Double.MIN_VALUE, stat.max(), EPSILON);
+ assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
+ assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
stat.add(3);
assertEquals("num samples", 1L, stat.numSamples());
@@ -60,8 +60,8 @@ public class TestSampleStat {
assertEquals("mean", 0.0, stat.mean(), EPSILON);
assertEquals("variance", 0.0, stat.variance(), EPSILON);
assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
- assertEquals("min", Double.MAX_VALUE, stat.min(), EPSILON);
- assertEquals("max", Double.MIN_VALUE, stat.max(), EPSILON);
+ assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
+ assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java
index 7bedd2d028a..56b5c32521d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java
@@ -137,4 +137,81 @@ public class TestCredentials {
}
tmpFileName.delete();
}
+
+ static Text secret[] = {
+ new Text("secret1"),
+ new Text("secret2"),
+ new Text("secret3"),
+ new Text("secret4")
+ };
+ static Text service[] = {
+ new Text("service1"),
+ new Text("service2"),
+ new Text("service3"),
+ new Text("service4")
+ };
+  static Token<? extends TokenIdentifier> token[] = {
+ new Token(),
+ new Token(),
+ new Token(),
+ new Token()
+ };
+
+ @Test
+ public void addAll() {
+ Credentials creds = new Credentials();
+ creds.addToken(service[0], token[0]);
+ creds.addToken(service[1], token[1]);
+ creds.addSecretKey(secret[0], secret[0].getBytes());
+ creds.addSecretKey(secret[1], secret[1].getBytes());
+
+ Credentials credsToAdd = new Credentials();
+ // one duplicate with different value, one new
+ credsToAdd.addToken(service[0], token[3]);
+ credsToAdd.addToken(service[2], token[2]);
+ credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
+ credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
+
+ creds.addAll(credsToAdd);
+ assertEquals(3, creds.numberOfTokens());
+ assertEquals(3, creds.numberOfSecretKeys());
+ // existing token & secret should be overwritten
+ assertEquals(token[3], creds.getToken(service[0]));
+ assertEquals(secret[3], new Text(creds.getSecretKey(secret[0])));
+ // non-duplicate token & secret should be present
+ assertEquals(token[1], creds.getToken(service[1]));
+ assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
+ // new token & secret should be added
+ assertEquals(token[2], creds.getToken(service[2]));
+ assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
+ }
+
+ @Test
+ public void mergeAll() {
+ Credentials creds = new Credentials();
+ creds.addToken(service[0], token[0]);
+ creds.addToken(service[1], token[1]);
+ creds.addSecretKey(secret[0], secret[0].getBytes());
+ creds.addSecretKey(secret[1], secret[1].getBytes());
+
+ Credentials credsToAdd = new Credentials();
+ // one duplicate with different value, one new
+ credsToAdd.addToken(service[0], token[3]);
+ credsToAdd.addToken(service[2], token[2]);
+ credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
+ credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
+
+ creds.mergeAll(credsToAdd);
+ assertEquals(3, creds.numberOfTokens());
+ assertEquals(3, creds.numberOfSecretKeys());
+ // existing token & secret should not be overwritten
+ assertEquals(token[0], creds.getToken(service[0]));
+ assertEquals(secret[0], new Text(creds.getSecretKey(secret[0])));
+ // non-duplicate token & secret should be present
+ assertEquals(token[1], creds.getToken(service[1]));
+ assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
+ // new token & secret should be added
+ assertEquals(token[2], creds.getToken(service[2]));
+ assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
}
+}
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml
index d6424ebc2f9..01b35e0ae15 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml
@@ -14,21 +14,16 @@
-->
 <project name="Hadoop HttpFS">
-  <version position="right"/>
+  <skin>
+    <groupId>org.apache.maven.skins</groupId>
+    <artifactId>maven-stylus-skin</artifactId>
+    <version>1.2</version>
+  </skin>
 
-  <bannerLeft>
-    <name>&nbsp;</name>
-  </bannerLeft>
-
-  <skin>
-    <groupId>org.apache.maven.skins</groupId>
-    <artifactId>maven-stylus-skin</artifactId>
-    <version>1.2</version>
-  </skin>
-
-  <body>
-    <links>
-      <item name="Apache Hadoop" href="http://hadoop.apache.org/"/>
-    </links>
+  <body>
+    <menu ref="reports"/>
+    <links>
+      <item name="Apache Hadoop" href="http://hadoop.apache.org/"/>
+    </links>
   </body>
 
 </project>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index de7bc757bc0..73febce94d2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -130,8 +130,14 @@ Release 0.23.2 - UNRELEASED
NEW FEATURES
+ HDFS-2943. Expose last checkpoint time and transaction stats as JMX
+ metrics. (atm)
+
IMPROVEMENTS
+ HDFS-2931. Switch DataNode's BlockVolumeChoosingPolicy to private-audience.
+ (harsh via szetszwo)
+
OPTIMIZATIONS
BUG FIXES
@@ -140,6 +146,14 @@ Release 0.23.2 - UNRELEASED
HDFS-2764. TestBackupNode is racy. (atm)
+ HDFS-2869. Fix an error in the webhdfs docs for the mkdir op (harsh)
+
+ HDFS-776. Fix exception handling in Balancer. (Uma Maheswara Rao G
+ via szetszwo)
+
+ HDFS-2815. Namenode sometimes does not come out of safemode during
+ NN crash + restart. (Uma Maheswara Rao via suresh)
+
Release 0.23.1 - 2012-02-08
INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
index 32a26f0c4a0..43764ca2758 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
@@ -349,7 +349,7 @@ Hello, webhdfs user!
 </source>
 <ul>
   <li>Submit a HTTP PUT request.
-<source>curl -i -X PUT "http://&lt;HOST&gt;:&lt;PORT&gt;/&lt;PATH&gt;?op=MKDIRS[&amp;permission=&lt;OCTAL&gt;]"</source>
+<source>curl -i -X PUT "http://&lt;HOST&gt;:&lt;PORT&gt;/webhdfs/v1/&lt;PATH&gt;?op=MKDIRS[&amp;permission=&lt;OCTAL&gt;]"</source>
   </li>
   <li>The client receives a response with a <a href="#boolean"><code>boolean</code> JSON object</a>:
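
With the corrected path, a MKDIRS round trip looks roughly like the sketch below. The namenode host and 50070 port are hypothetical placeholders, and the {"boolean":true} response body is the boolean JSON object the doc refers to:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class WebHdfsMkdirs {
      public static void main(String[] args) throws Exception {
        // Note the /webhdfs/v1 prefix -- its omission was the doc error fixed above.
        URL url = new URL("http://namenode:50070/webhdfs/v1/tmp/newdir?op=MKDIRS");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        System.out.println(conn.getResponseCode());   // 200 on success
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()));
        System.out.println(in.readLine());            // {"boolean":true}
        in.close();
        conn.disconnect();
      }
    }
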