From 5e185702ea87541e54982c791e0ed782a7cf072a Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Sun, 18 Dec 2011 18:12:48 +0000
Subject: [PATCH] HDFS-2335. DataNodeCluster and NNStorage always pull fresh entropy. Contributed by Uma Maheswara Rao G

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1220510 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |  3 +++
 .../main/java/org/apache/hadoop/hdfs/DFSUtil.java  | 15 ++++++++++++++-
 .../hadoop/hdfs/server/datanode/DataNode.java      |  3 +--
 .../hadoop/hdfs/server/namenode/NNStorage.java     | 10 +---------
 .../org/apache/hadoop/hdfs/DataNodeCluster.java    | 10 +---------
 5 files changed, 20 insertions(+), 21 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c7bd4272336..0afd8f75d46 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -234,6 +234,9 @@ Release 0.23.1 - UNRELEASED
     HDFS-2675. Reduce warning verbosity when double-closing edit logs
     (todd)
 
+    HDFS-2335. DataNodeCluster and NNStorage always pull fresh entropy.
+    (Uma Maheswara Rao G via eli)
+
   OPTIMIZATIONS
 
     HDFS-2130. Switch default checksum to CRC32C. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 73bb2190e52..0b6308e8e90 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -30,6 +30,7 @@
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Comparator;
@@ -69,11 +70,23 @@ protected Random initialValue() {
       return new Random();
     }
   };
+
+  private static final ThreadLocal<SecureRandom> SECURE_RANDOM = new ThreadLocal<SecureRandom>() {
+    @Override
+    protected SecureRandom initialValue() {
+      return new SecureRandom();
+    }
+  };
 
-  /** @return a pseudorandom number generator. */
+  /** @return a pseudo random number generator. */
   public static Random getRandom() {
     return RANDOM.get();
   }
+
+  /** @return a pseudo secure random number generator. */
+  public static SecureRandom getSecureRandom() {
+    return SECURE_RANDOM.get();
+  }
 
   /**
    * Compartor for sorting DataNodeInfo[] based on decommissioned states.
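[Editor's note, not part of the patch] The DFSUtil hunk above is the heart of the change: instead of every caller constructing its own SecureRandom (and, per the JIRA title, pulling fresh entropy each time), DFSUtil now keeps one lazily seeded instance per thread and hands it out via getSecureRandom(). The standalone sketch below illustrates why that matters; the class name SecureRandomReuseDemo and the timing harness are mine, not Hadoop's, but the ThreadLocal idiom mirrors the code added to DFSUtil.

import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;

public class SecureRandomReuseDemo {

  // Same pattern the patch adds to DFSUtil: one SecureRandom per thread,
  // seeded lazily on first use and then reused for every subsequent call.
  private static final ThreadLocal<SecureRandom> SECURE_RANDOM =
      new ThreadLocal<SecureRandom>() {
        @Override
        protected SecureRandom initialValue() {
          return new SecureRandom();
        }
      };

  public static void main(String[] args) throws NoSuchAlgorithmException {
    final int iterations = 10000;

    long start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
      // Old style (NNStorage/DataNodeCluster before the patch): every call
      // constructs a new PRNG and seeds it afresh before use.
      SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
    }
    long freshNanos = System.nanoTime() - start;

    start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
      // New style: the per-thread instance is seeded once and reused.
      SECURE_RANDOM.get().nextInt(Integer.MAX_VALUE);
    }
    long reusedNanos = System.nanoTime() - start;

    System.out.println("fresh instance per call: " + (freshNanos / 1000000) + " ms");
    System.out.println("thread-local reuse:      " + (reusedNanos / 1000000) + " ms");
  }
}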
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index dba65dad461..7b4032556b4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -66,7 +66,6 @@
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.security.PrivilegedExceptionAction;
-import java.security.SecureRandom;
 import java.util.AbstractList;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1086,7 +1085,7 @@ static String createNewStorageId(int port) {
       LOG.warn("Could not find ip address of \"default\" inteface.");
     }
 
-    int rand = new SecureRandom().nextInt(Integer.MAX_VALUE);
+    int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
     return "DS-" + rand + "-" + ip + "-" + port + "-"
         + System.currentTimeMillis();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
index e410df917c5..4ced54447aa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
@@ -26,8 +26,6 @@
 import java.io.OutputStream;
 import java.net.URI;
 import java.net.UnknownHostException;
-import java.security.NoSuchAlgorithmException;
-import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -978,13 +976,7 @@ String newBlockPoolID() throws UnknownHostException{
       throw e;
     }
 
-    int rand = 0;
-    try {
-      rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
-    } catch (NoSuchAlgorithmException e) {
-      LOG.warn("Could not use SecureRandom");
-      rand = DFSUtil.getRandom().nextInt(Integer.MAX_VALUE);
-    }
+    int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
     String bpid = "BP-" + rand + "-"+ ip + "-" + System.currentTimeMillis();
     return bpid;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DataNodeCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DataNodeCluster.java
index f82986f331b..f3350b988a7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DataNodeCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DataNodeCluster.java
@@ -19,10 +19,7 @@
 
 import java.io.IOException;
 import java.net.UnknownHostException;
-import java.security.NoSuchAlgorithmException;
-import java.security.SecureRandom;
 import java.util.Arrays;
-import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -234,12 +231,7 @@ static private String getUniqueRackPrefix() {
       System.out.println("Could not find ip address of \"default\" inteface.");
     }
 
-    int rand = 0;
-    try {
-      rand = SecureRandom.getInstance("SHA1PRNG").nextInt(Integer.MAX_VALUE);
-    } catch (NoSuchAlgorithmException e) {
-      rand = (new Random()).nextInt(Integer.MAX_VALUE);
-    }
+    int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
     return "/Rack-" + rand + "-"+ ip + "-" +
              System.currentTimeMillis();
   }
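[Editor's note, not part of the patch] The three call sites above all collapse to the same shape: one nextInt() on the shared generator, with the try/catch around SecureRandom.getInstance("SHA1PRNG") and the java.util.Random fallback gone, since DFSUtil.getSecureRandom() is constructed once per thread and declares no checked exception. The hypothetical sketch below mirrors the post-patch shape of DataNode.createNewStorageId(int); the class StorageIdSketch and its thread-local field stand in for DFSUtil.getSecureRandom() and are my naming, not Hadoop's.

import java.security.SecureRandom;

public class StorageIdSketch {

  // Hypothetical stand-in for DFSUtil.getSecureRandom().
  private static final ThreadLocal<SecureRandom> SECURE_RANDOM =
      new ThreadLocal<SecureRandom>() {
        @Override
        protected SecureRandom initialValue() {
          return new SecureRandom();
        }
      };

  // Mirrors the post-patch shape of DataNode.createNewStorageId(int):
  // one call on the shared generator, no NoSuchAlgorithmException to handle.
  static String newStorageId(String ip, int port) {
    int rand = SECURE_RANDOM.get().nextInt(Integer.MAX_VALUE);
    return "DS-" + rand + "-" + ip + "-" + port + "-" + System.currentTimeMillis();
  }

  public static void main(String[] args) {
    System.out.println(newStorageId("127.0.0.1", 50010));
  }
}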