MAPREDUCE-6252. JobHistoryServer should not fail when encountering a missing directory. Contributed by Craig Welch.
This commit is contained in:
Devaraj K 2015-04-27 15:01:42 +05:30
parent 618ba707f0
commit 5e67c4d384
3 changed files with 37 additions and 7 deletions

View File

@ -349,6 +349,9 @@ Release 2.8.0 - UNRELEASED
MAPREDUCE-6333. TestEvents,TestAMWebServicesTasks,TestAppController are
broken due to MAPREDUCE-6297. (Siqi Li via gera)
MAPREDUCE-6252. JobHistoryServer should not fail when encountering a
missing directory. (Craig Welch via devaraj)
Release 2.7.1 - UNRELEASED

INCOMPATIBLE CHANGES

View File

@ -740,10 +740,12 @@ public class HistoryFileManager extends AbstractService {
} }
} }
private static List<FileStatus> scanDirectory(Path path, FileContext fc, @VisibleForTesting
protected static List<FileStatus> scanDirectory(Path path, FileContext fc,
PathFilter pathFilter) throws IOException { PathFilter pathFilter) throws IOException {
path = fc.makeQualified(path); path = fc.makeQualified(path);
List<FileStatus> jhStatusList = new ArrayList<FileStatus>(); List<FileStatus> jhStatusList = new ArrayList<FileStatus>();
try {
RemoteIterator<FileStatus> fileStatusIter = fc.listStatus(path); RemoteIterator<FileStatus> fileStatusIter = fc.listStatus(path);
while (fileStatusIter.hasNext()) { while (fileStatusIter.hasNext()) {
FileStatus fileStatus = fileStatusIter.next(); FileStatus fileStatus = fileStatusIter.next();
@ -752,6 +754,9 @@ public class HistoryFileManager extends AbstractService {
jhStatusList.add(fileStatus); jhStatusList.add(fileStatus);
} }
} }
} catch (FileNotFoundException fe) {
LOG.error("Error while scanning directory " + path, fe);
}
return jhStatusList; return jhStatusList;
} }

View File

@ -21,13 +21,17 @@ package org.apache.hadoop.mapreduce.v2.hs;
import java.io.File; import java.io.File;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.util.UUID; import java.util.UUID;
import java.util.List;
import org.junit.Assert; import org.junit.Assert;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@ -45,6 +49,8 @@ import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.TestName; import org.junit.rules.TestName;
import static org.mockito.Mockito.*;
public class TestHistoryFileManager { public class TestHistoryFileManager {
private static MiniDFSCluster dfsCluster = null; private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null; private static MiniDFSCluster dfsCluster2 = null;
@ -199,4 +205,20 @@ public class TestHistoryFileManager {
testCreateHistoryDirs(dfsCluster.getConfiguration(0), clock); testCreateHistoryDirs(dfsCluster.getConfiguration(0), clock);
} }
@Test
public void testScanDirectory() throws Exception {
// Regression test for MAPREDUCE-6252: HistoryFileManager.scanDirectory
// must tolerate a directory that has gone missing. The mocked
// FileContext throws FileNotFoundException from listStatus; the method
// is expected to catch it internally (and log it) instead of
// propagating it to the caller.
Path p = new Path("any");
FileContext fc = mock(FileContext.class);
when(fc.makeQualified(p)).thenReturn(p);
when(fc.listStatus(p)).thenThrow(new FileNotFoundException());
// pathFilter is passed as null; with listStatus throwing immediately,
// the filter is never consulted.
List<FileStatus> lfs = HistoryFileManager.scanDirectory(p, fc, null);
//primarily, success is that an exception was not thrown. Also nice to
//check this
Assert.assertNotNull(lfs);
}
} }