MAPREDUCE-6881. Fix warnings from Spotbugs in hadoop-mapreduce. Contributed by Weiwei Yang.

(cherry picked from commit 3ed3062fe3)
This commit is contained in:
Akira Ajisaka 2017-04-27 15:45:33 +09:00 committed by Masatake Iwasaki
parent 21788f9fd4
commit 7dedf344de
6 changed files with 53 additions and 36 deletions

View File

@@ -27,6 +27,8 @@ import java.lang.management.ThreadMXBean;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.Collections;
import java.util.concurrent.BlockingQueue; import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
@@ -81,7 +83,7 @@ public class LocalContainerLauncher extends AbstractService implements
private static final Log LOG = LogFactory.getLog(LocalContainerLauncher.class); private static final Log LOG = LogFactory.getLog(LocalContainerLauncher.class);
private FileContext curFC = null; private FileContext curFC = null;
private final HashSet<File> localizedFiles; private Set<File> localizedFiles = new HashSet<File>();
private final AppContext context; private final AppContext context;
private final TaskUmbilicalProtocol umbilical; private final TaskUmbilicalProtocol umbilical;
private final ClassLoader jobClassLoader; private final ClassLoader jobClassLoader;
@@ -121,9 +123,12 @@ public class LocalContainerLauncher extends AbstractService implements
// users who do that get what they deserve (and will have to disable // users who do that get what they deserve (and will have to disable
// uberization in order to run correctly). // uberization in order to run correctly).
File[] curLocalFiles = curDir.listFiles(); File[] curLocalFiles = curDir.listFiles();
localizedFiles = new HashSet<File>(curLocalFiles.length); if (curLocalFiles != null) {
for (int j = 0; j < curLocalFiles.length; ++j) { HashSet<File> lf = new HashSet<File>(curLocalFiles.length);
localizedFiles.add(curLocalFiles[j]); for (int j = 0; j < curLocalFiles.length; ++j) {
lf.add(curLocalFiles[j]);
}
localizedFiles = Collections.unmodifiableSet(lf);
} }
// Relocalization note/future FIXME (per chrisdo, 20110315): At moment, // Relocalization note/future FIXME (per chrisdo, 20110315): At moment,
@@ -521,26 +526,29 @@ public class LocalContainerLauncher extends AbstractService implements
*/ */
private void relocalize() { private void relocalize() {
File[] curLocalFiles = curDir.listFiles(); File[] curLocalFiles = curDir.listFiles();
for (int j = 0; j < curLocalFiles.length; ++j) { if (curLocalFiles != null) {
if (!localizedFiles.contains(curLocalFiles[j])) { for (int j = 0; j < curLocalFiles.length; ++j) {
// found one that wasn't there before: delete it if (!localizedFiles.contains(curLocalFiles[j])) {
boolean deleted = false; // found one that wasn't there before: delete it
try { boolean deleted = false;
if (curFC != null) { try {
// this is recursive, unlike File delete(): if (curFC != null) {
deleted = curFC.delete(new Path(curLocalFiles[j].getName()),true); // this is recursive, unlike File delete():
deleted =
curFC.delete(new Path(curLocalFiles[j].getName()), true);
}
} catch (IOException e) {
deleted = false;
}
if (!deleted) {
LOG.warn("Unable to delete unexpected local file/dir "
+ curLocalFiles[j].getName()
+ ": insufficient permissions?");
} }
} catch (IOException e) {
deleted = false;
}
if (!deleted) {
LOG.warn("Unable to delete unexpected local file/dir "
+ curLocalFiles[j].getName() + ": insufficient permissions?");
} }
} }
} }
} }
} // end EventHandler } // end EventHandler
/** /**

View File

@@ -557,13 +557,15 @@ public class MRAppMaster extends CompositeService {
private boolean isJobNamePatternMatch(JobConf conf, String jobTempDir) { private boolean isJobNamePatternMatch(JobConf conf, String jobTempDir) {
// Matched staging files should be preserved after job is finished. // Matched staging files should be preserved after job is finished.
if (conf.getKeepTaskFilesPattern() != null && jobTempDir != null) { if (conf.getKeepTaskFilesPattern() != null && jobTempDir != null) {
String jobFileName = Paths.get(jobTempDir).getFileName().toString(); java.nio.file.Path pathName = Paths.get(jobTempDir).getFileName();
Pattern pattern = Pattern.compile(conf.getKeepTaskFilesPattern()); if (pathName != null) {
Matcher matcher = pattern.matcher(jobFileName); String jobFileName = pathName.toString();
return matcher.find(); Pattern pattern = Pattern.compile(conf.getKeepTaskFilesPattern());
} else { Matcher matcher = pattern.matcher(jobFileName);
return false; return matcher.find();
}
} }
return false;
} }
private boolean isKeepFailedTaskFiles(JobConf conf) { private boolean isKeepFailedTaskFiles(JobConf conf) {

View File

@@ -98,7 +98,7 @@ class JVMId {
int jobComp = this.jobId.compareTo(that.jobId); int jobComp = this.jobId.compareTo(that.jobId);
if(jobComp == 0) { if(jobComp == 0) {
if(this.isMap == that.isMap) { if(this.isMap == that.isMap) {
return Long.valueOf(this.jvmId).compareTo(that.jvmId); return Long.compare(this.jvmId, that.jvmId);
} else { } else {
return this.isMap ? -1 : 1; return this.isMap ? -1 : 1;
} }

View File

@@ -34,12 +34,20 @@ public enum Operation {
KILL_TASK(QueueACL.ADMINISTER_JOBS, JobACL.MODIFY_JOB), KILL_TASK(QueueACL.ADMINISTER_JOBS, JobACL.MODIFY_JOB),
SET_JOB_PRIORITY(QueueACL.ADMINISTER_JOBS, JobACL.MODIFY_JOB), SET_JOB_PRIORITY(QueueACL.ADMINISTER_JOBS, JobACL.MODIFY_JOB),
SUBMIT_JOB(QueueACL.SUBMIT_JOB, null); SUBMIT_JOB(QueueACL.SUBMIT_JOB, null);
public QueueACL qACLNeeded; private final QueueACL qACLNeeded;
public JobACL jobACLNeeded; private final JobACL jobACLNeeded;
Operation(QueueACL qACL, JobACL jobACL) { Operation(QueueACL qACL, JobACL jobACL) {
this.qACLNeeded = qACL; this.qACLNeeded = qACL;
this.jobACLNeeded = jobACL; this.jobACLNeeded = jobACL;
} }
public QueueACL getqACLNeeded() {
return qACLNeeded;
}
public JobACL getJobACLNeeded() {
return jobACLNeeded;
}
} }

View File

@@ -413,17 +413,12 @@ public class HistoryFileManager extends AbstractService {
} }
JobId jobId = jobIndexInfo.getJobId(); JobId jobId = jobIndexInfo.getJobId();
List<Path> paths = new ArrayList<Path>(2);
if (historyFile == null) { if (historyFile == null) {
LOG.info("No file for job-history with " + jobId + " found in cache!"); LOG.info("No file for job-history with " + jobId + " found in cache!");
} else {
paths.add(historyFile);
} }
if (confFile == null) { if (confFile == null) {
LOG.info("No file for jobConf with " + jobId + " found in cache!"); LOG.info("No file for jobConf with " + jobId + " found in cache!");
} else {
paths.add(confFile);
} }
if (summaryFile == null || !intermediateDoneDirFc.util().exists( if (summaryFile == null || !intermediateDoneDirFc.util().exists(

View File

@@ -67,8 +67,12 @@ public final class Parser {
private void parse(File f, Map<Parameter, List<TaskResult>> sums) throws IOException { private void parse(File f, Map<Parameter, List<TaskResult>> sums) throws IOException {
if (f.isDirectory()) { if (f.isDirectory()) {
println("Process directory " + f); println("Process directory " + f);
for(File child : f.listFiles()) File[] files = f.listFiles();
parse(child, sums); if (files != null) {
for(File child : files) {
parse(child, sums);
}
}
} else if (f.getName().endsWith(".txt")) { } else if (f.getName().endsWith(".txt")) {
println("Parse file " + f); println("Parse file " + f);
final Map<Parameter, List<TaskResult>> m = new TreeMap<Parameter, List<TaskResult>>(); final Map<Parameter, List<TaskResult>> m = new TreeMap<Parameter, List<TaskResult>>();