From 85bd8e0b728408185a5717361bdd757f46a4709e Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Tue, 16 Oct 2012 02:25:25 +0000
Subject: [PATCH] HDFS-4058. DirectoryScanner may fail with IOOB if the
 directory scanning threads return out of volume order. Contributed by Eli
 Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1398617 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt          |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java       | 18 +++++++++---------
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5ab7edeaa68..b4b720b6d02 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -114,6 +114,9 @@ Release 2.0.3-alpha - Unreleased
 
     HDFS-3678. Edit log files are never being purged from 2NN. (atm)
 
+    HDFS-4058. DirectoryScanner may fail with IOOB if the directory
+    scanning threads return out of volume order. (eli)
+
 Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 004af654e63..5d870d771e8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -431,16 +431,16 @@ public class DirectoryScanner implements Runnable {
   private Map<String, ScanInfo[]> getDiskReport() {
     // First get list of data directories
     final List<? extends FsVolumeSpi> volumes = dataset.getVolumes();
-    ArrayList<ScanInfoPerBlockPool> dirReports =
-      new ArrayList<ScanInfoPerBlockPool>(volumes.size());
-
+
+    // Use an array since the threads may return out of order and
+    // compilersInProgress#keySet may return out of order as well.
+    ScanInfoPerBlockPool[] dirReports = new ScanInfoPerBlockPool[volumes.size()];
+
     Map<Integer, Future<ScanInfoPerBlockPool>> compilersInProgress =
       new HashMap<Integer, Future<ScanInfoPerBlockPool>>();
+
     for (int i = 0; i < volumes.size(); i++) {
-      if (!isValid(dataset, volumes.get(i))) {
-        // volume is invalid
-        dirReports.add(i, null);
-      } else {
+      if (isValid(dataset, volumes.get(i))) {
         ReportCompiler reportCompiler =
           new ReportCompiler(volumes.get(i));
         Future<ScanInfoPerBlockPool> result =
@@ -452,7 +452,7 @@ public class DirectoryScanner implements Runnable {
     for (Entry<Integer, Future<ScanInfoPerBlockPool>> report :
         compilersInProgress.entrySet()) {
       try {
-        dirReports.add(report.getKey(), report.getValue().get());
+        dirReports[report.getKey()] = report.getValue().get();
       } catch (Exception ex) {
         LOG.error("Error compiling report", ex);
         // Propagate ex to DataBlockScanner to deal with
@@ -465,7 +465,7 @@ public class DirectoryScanner implements Runnable {
     for (int i = 0; i < volumes.size(); i++) {
       if (isValid(dataset, volumes.get(i))) {
         // volume is still valid
-        list.addAll(dirReports.get(i));
+        list.addAll(dirReports[i]);
      }
    }
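
Note (not part of the patch): a minimal, self-contained sketch of the failure mode this change fixes, with hypothetical stand-in names rather than the DataNode classes above. ArrayList.add(int index, E element) throws IndexOutOfBoundsException whenever index > size(), so inserting each volume's report at its volume index fails as soon as a later volume's compiler thread finishes before an earlier one's. Writing into a pre-sized array succeeds in any completion order, which is why the patch swaps the collection type rather than trying to reorder the threads.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only -- not part of the patch above. The string
// "report-i" stands in for the ScanInfoPerBlockPool a ReportCompiler
// thread would return for volume i.
public class OutOfOrderDemo {
  public static void main(String[] args) {
    int numVolumes = 3;
    // Simulate compiler threads completing in the order 2, 0, 1
    // instead of volume order 0, 1, 2.
    int[] completionOrder = {2, 0, 1};

    // Pre-fix pattern: the capacity argument does not change size(),
    // which is still 0, so add(2, ...) violates index <= size() and
    // throws IndexOutOfBoundsException on the first insertion.
    List<String> dirReports = new ArrayList<String>(numVolumes);
    try {
      for (int i : completionOrder) {
        dirReports.add(i, "report-" + i);
      }
    } catch (IndexOutOfBoundsException e) {
      System.out.println("ArrayList pattern failed: " + e);
    }

    // Post-fix pattern: a fixed-size array accepts a write at any
    // index, so the order in which results come back no longer matters.
    String[] reportArray = new String[numVolumes];
    for (int i : completionOrder) {
      reportArray[i] = "report-" + i;
    }
    System.out.println("Array pattern: " + String.join(", ", reportArray));
  }
}

Running this prints the IndexOutOfBoundsException from the ArrayList pattern and then the complete per-volume report list from the array pattern.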