From 719b719e3dd622ebc5cea46c63e2dbfdc397863c Mon Sep 17 00:00:00 2001
From: Robert Joseph Evans <bobby@apache.org>
Date: Wed, 25 Apr 2012 20:49:41 +0000
Subject: [PATCH] MAPREDUCE-4194. ConcurrentModificationError in
 DirectoryCollection (Jonathan Eagles via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1330552 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |  3 +
 .../nodemanager/DirectoryCollection.java      | 25 +++----
 .../nodemanager/TestDirectoryCollection.java  | 68 +++++++++++++++++++
 3 files changed, 80 insertions(+), 16 deletions(-)
 create mode 100644 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDirectoryCollection.java

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index cfcda112267..fbadd762ad3 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -406,6 +406,9 @@ Release 0.23.3 - UNRELEASED
 
     MAPREDUCE-4133. MR over viewfs is broken (John George via bobby)
 
+    MAPREDUCE-4194. ConcurrentModificationError in DirectoryCollection
+    (Jonathan Eagles via bobby)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
index 67ed4618a0e..997156741ae 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
@@ -19,10 +19,9 @@
 package org.apache.hadoop.yarn.server.nodemanager;
 
 import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.Collections;
 import java.util.List;
-import java.util.ListIterator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -41,23 +40,22 @@ class DirectoryCollection {
   private int numFailures;
 
   public DirectoryCollection(String[] dirs) {
-    localDirs = new ArrayList<String>();
-    localDirs.addAll(Arrays.asList(dirs));
-    failedDirs = new ArrayList<String>();
+    localDirs = new CopyOnWriteArrayList<String>(dirs);
+    failedDirs = new CopyOnWriteArrayList<String>();
   }
 
   /**
    * @return the current valid directories
    */
   synchronized List<String> getGoodDirs() {
-    return localDirs;
+    return Collections.unmodifiableList(localDirs);
   }
 
   /**
    * @return the failed directories
    */
   synchronized List<String> getFailedDirs() {
-    return failedDirs;
+    return Collections.unmodifiableList(failedDirs);
   }
 
   /**
@@ -75,22 +73,17 @@ class DirectoryCollection {
    */
   synchronized boolean checkDirs() {
     int oldNumFailures = numFailures;
-    ListIterator<String> it = localDirs.listIterator();
-    while (it.hasNext()) {
-      final String dir = it.next();
+    for (final String dir : localDirs) {
       try {
         DiskChecker.checkDir(new File(dir));
       } catch (DiskErrorException de) {
         LOG.warn("Directory " + dir + " error " +
             de.getMessage() + ", removing from the list of valid directories.");
-        it.remove();
+        localDirs.remove(dir);
         failedDirs.add(dir);
         numFailures++;
       }
     }
-    if (numFailures > oldNumFailures) {
-      return true;
-    }
-    return false;
+    return numFailures > oldNumFailures;
   }
 }
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDirectoryCollection.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDirectoryCollection.java
new file mode 100644
index 00000000000..9f6fcf7a3f3
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDirectoryCollection.java
@@ -0,0 +1,68 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.nodemanager;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.ListIterator;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestDirectoryCollection {
+
+  private static final File testDir = new File("target",
+      TestDirectoryCollection.class.getName()).getAbsoluteFile();
+  private static final File testFile = new File(testDir, "testfile");
+
+  @BeforeClass
+  public static void setup() throws IOException {
+    testDir.mkdirs();
+    testFile.createNewFile();
+  }
+
+  @AfterClass
+  public static void teardown() {
+    FileUtil.fullyDelete(testDir);
+  }
+
+  @Test
+  public void testConcurrentAccess() throws IOException {
+    // Initialize DirectoryCollection with a file instead of a directory
+    String[] dirs = {testFile.getPath()};
+    DirectoryCollection dc = new DirectoryCollection(dirs);
+
+    // Create an iterator before checkDirs is called to reliably test this case
+    List<String> list = dc.getGoodDirs();
+    ListIterator<String> li = list.listIterator();
+
+    // DiskErrorException will invalidate iterator of non-concurrent
+    // collections. ConcurrentModificationException will be thrown upon next
+    // use of the iterator.
+    Assert.assertTrue("checkDirs did not remove test file from directory list",
+        dc.checkDirs());
+
+    // Verify no ConcurrentModificationException is thrown
+    li.next();
+  }
+}