MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core. Contributed by Varun Saxena

(cherry picked from commit 814afa46ef)
Junping Du 2015-02-16 09:38:05 -08:00
parent fabac2ff30
commit 1d91daaae9
10 changed files with 24 additions and 31 deletions

CHANGES.txt

@@ -58,6 +58,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6256. Removed unused private methods in o.a.h.mapreduce.Job.java.
     (Naganarasimha G R via ozawa)
 
+    MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core.
+    (Varun Saxena via junping_du)
+
   OPTIMIZATIONS
 
     MAPREDUCE-6169. MergeQueue should release reference to the current item

IndexCache.java

@@ -145,7 +145,7 @@ class IndexCache {
    */
   public void removeMap(String mapId) {
     IndexInformation info = cache.get(mapId);
-    if (info == null || ((info != null) && isUnderConstruction(info))) {
+    if (info == null || isUnderConstruction(info)) {
       return;
     }
     info = cache.remove(mapId);
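The deleted `(info != null)` is the redundant null check findbugs complains about: by the time the right-hand side of `||` runs, `info == null` has already evaluated to false. A self-contained sketch of the same simplification (names are illustrative, not Hadoop's):

public class RedundantNullCheckDemo {
  public static void main(String[] args) {
    String info = args.length > 0 ? args[0] : null;
    // Before: (info != null) can only be reached when info is non-null, so it is always true.
    if (info == null || ((info != null) && info.isEmpty())) {
      System.out.println("missing or empty");
    }
    // After: short-circuit || already guarantees info is non-null on the right-hand side.
    if (info == null || info.isEmpty()) {
      System.out.println("missing or empty");
    }
  }
}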

TaskLogAppender.java

@@ -75,7 +75,7 @@ public class TaskLogAppender extends FileAppender implements Flushable {
     if (maxEvents == null) {
       String propValue = System.getProperty(LOGSIZE_PROPERTY, "0");
-      setTotalLogFileSize(Long.valueOf(propValue));
+      setTotalLogFileSize(Long.parseLong(propValue));
     }
   }
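`Long.valueOf` produces a boxed `Long` that is immediately unboxed again when the setter takes a primitive, the needless-boxing pattern findbugs reports; `Long.parseLong` parses straight to a `long`. A minimal sketch, assuming a primitive `long` parameter (the setter below is a stand-in, not Hadoop's):

public class ParseVsValueOfDemo {
  private static long totalLogFileSize;

  static void setTotalLogFileSize(long size) { totalLogFileSize = size; }

  public static void main(String[] args) {
    String propValue = System.getProperty("demo.logsize", "0");
    setTotalLogFileSize(Long.valueOf(propValue));   // allocates a Long, then auto-unboxes it
    setTotalLogFileSize(Long.parseLong(propValue)); // parses directly to the primitive
    System.out.println(totalLogFileSize);
  }
}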

org/apache/hadoop/mapred/lib/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -49,9 +47,7 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;
   protected int idx;
   protected long progress;
@@ -106,7 +102,6 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
       throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;
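Both field deletions target state that was written in the constructor but never read afterwards, the unread-field class of findbugs warnings; once `fs` goes, its `FileSystem` import goes with it. A hypothetical, self-contained illustration of what the checker flags:

public class UnreadFieldDemo {
  private final String unread; // written below, never read again: an unread field
  private final String used;

  UnreadFieldDemo(String a, String b) {
    this.unread = a;
    this.used = b;
  }

  @Override
  public String toString() {
    return used; // only 'used' ever participates in behavior
  }

  public static void main(String[] args) {
    System.out.println(new UnreadFieldDemo("dead weight", "alive"));
  }
}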

JobSubmitter.java

@@ -390,10 +390,12 @@ class JobSubmitter {
     short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
     copyAndConfigureFiles(job, jobSubmitDir, replication);
 
-    // Set the working directory
-    if (job.getWorkingDirectory() == null) {
-      job.setWorkingDirectory(jtFs.getWorkingDirectory());
-    }
+    // Get the working directory. If not set, sets it to filesystem working dir
+    // This code has been added so that working directory reset before running
+    // the job. This is necessary for backward compatibility as other systems
+    // might use the public API JobConf#setWorkingDirectory to reset the working
+    // directory.
+    job.getWorkingDirectory();
   }
 
   /**
@@ -773,11 +775,6 @@ class JobSubmitter {
     if (!log4jPropertyFile.isEmpty()) {
       short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
       copyLog4jPropertyFile(job, jobSubmitDir, replication);
-
-      // Set the working directory
-      if (job.getWorkingDirectory() == null) {
-        job.setWorkingDirectory(jtFs.getWorkingDirectory());
-      }
     }
   }
 }
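The fix works because the getter itself installs a default: when no working directory has been set, `getWorkingDirectory()` records the filesystem's working directory in the configuration, so a single call replaces the old null-check-then-set sequence while still honoring an earlier `setWorkingDirectory`. A sketch of that lazy-default pattern (illustrative names, not Hadoop's code):

import java.util.HashMap;
import java.util.Map;

public class LazyDefaultDemo {
  private final Map<String, String> conf = new HashMap<>();

  public void setWorkingDirectory(String dir) {
    conf.put("workdir", dir);
  }

  public String getWorkingDirectory() {
    // First read installs a default so all later reads agree with it.
    return conf.computeIfAbsent("workdir", k -> System.getProperty("user.dir"));
  }

  public static void main(String[] args) {
    LazyDefaultDemo job = new LazyDefaultDemo();
    System.out.println(job.getWorkingDirectory()); // default resolved and recorded here
    job.setWorkingDirectory("/tmp/override");
    System.out.println(job.getWorkingDirectory()); // an explicit setting still wins
  }
}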

FieldSelectionHelper.java

@@ -90,7 +90,7 @@ public class FieldSelectionHelper {
     }
     pos = fieldSpec.indexOf('-');
     if (pos < 0) {
-      Integer fn = new Integer(fieldSpec);
+      Integer fn = Integer.valueOf(fieldSpec);
       fieldList.add(fn);
     } else {
       String start = fieldSpec.substring(0, pos);
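`new Integer(...)` allocates a fresh object on every call (and the constructor is deprecated in modern JDKs), while `Integer.valueOf(...)` can hand back a cached instance; the JDK guarantees a cache for at least -128 to 127. A small demonstration:

public class ValueOfCacheDemo {
  public static void main(String[] args) {
    Integer a = Integer.valueOf("100");
    Integer b = Integer.valueOf("100");
    // Both parse to 100, which falls inside the mandatory Integer cache,
    // so valueOf returns the same object. new Integer("100") never could.
    System.out.println(a.equals(b)); // true
    System.out.println(a == b);      // true here only because of the cache; don't rely on ==
  }
}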

org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.lib.input;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,9 +44,7 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
                                             Integer.class};
   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;
   protected int idx;
@@ -111,7 +107,6 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
       throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;
this.progress = 0;

SecureShuffleUtils.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.mapreduce.security;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URL;
 
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
@@ -141,10 +141,15 @@
    */
   public static String toHex(byte[] ba) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for (byte b : ba) {
-      ps.printf("%x", b);
-    }
-    return baos.toString();
+    String strHex = "";
+    try {
+      PrintStream ps = new PrintStream(baos, false, "UTF-8");
+      for (byte b : ba) {
+        ps.printf("%x", b);
+      }
+      strHex = baos.toString("UTF-8");
+    } catch (UnsupportedEncodingException e) {
+    }
+    return strHex;
   }
 }
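The rewrite fixes a default-encoding warning: `new PrintStream(baos)` and `baos.toString()` both use the platform default charset, so the output could in principle vary with locale settings; pinning "UTF-8" makes it deterministic, at the cost of a checked `UnsupportedEncodingException` that can never actually fire for UTF-8. A charset-free alternative sketch that sidesteps the stream entirely (illustrative, not what the patch does):

public class ToHexDemo {
  // Same "%x"-per-byte formatting, but into a StringBuilder: no byte-to-char
  // decoding step, hence no charset to get wrong and no checked exception.
  public static String toHex(byte[] ba) {
    StringBuilder sb = new StringBuilder(ba.length * 2);
    for (byte b : ba) {
      sb.append(String.format("%x", b));
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(toHex(new byte[] {0x0f, (byte) 0xa0})); // prints "fa0"
  }
}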

InMemoryReader.java

@@ -79,10 +79,8 @@ public class InMemoryReader<K, V> extends Reader<K, V> {
     File dumpFile = new File("../output/" + taskAttemptId + ".dump");
     System.err.println("Dumping corrupt map-output of " + taskAttemptId +
                        " to " + dumpFile.getAbsolutePath());
-    try {
-      FileOutputStream fos = new FileOutputStream(dumpFile);
+    try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
       fos.write(buffer, 0, bufferSize);
-      fos.close();
     } catch (IOException ioe) {
       System.err.println("Failed to dump map-output of " + taskAttemptId);
     }
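The old code only closed the stream on the success path; if `write` threw, the descriptor leaked, which is the stream-not-closed-on-all-paths findbugs warning. try-with-resources closes it on every exit. A compact, runnable sketch of the pattern (file name is hypothetical):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public class TryWithResourcesDemo {
  public static void main(String[] args) {
    File dumpFile = new File("demo.dump"); // hypothetical path for the sketch
    try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
      fos.write(new byte[] {1, 2, 3});
      // fos.close() happens implicitly here, even if write() throws.
    } catch (IOException ioe) {
      System.err.println("Failed to write dump: " + ioe.getMessage());
    }
  }
}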

ResourceBundles.java

@@ -59,7 +59,7 @@ public class ResourceBundles {
     catch (Exception e) {
       return defaultValue;
     }
-    return value == null ? defaultValue : value;
+    return value;
   }
 
   private static String getLookupKey(String key, String suffix) {
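The ternary was dead code: a `ResourceBundle` lookup never returns null for a present key and throws `MissingResourceException` for an absent one, and the catch above already maps that to `defaultValue`. A self-contained sketch of that contract (demo bundle, not Hadoop's):

import java.util.ListResourceBundle;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

public class BundleLookupDemo {
  static final ResourceBundle BUNDLE = new ListResourceBundle() {
    @Override
    protected Object[][] getContents() {
      return new Object[][] {{"greeting", "hello"}};
    }
  };

  static String getValue(String key, String defaultValue) {
    String value;
    try {
      value = BUNDLE.getString(key); // throws MissingResourceException, never returns null
    } catch (MissingResourceException e) {
      return defaultValue;
    }
    return value; // no null check needed here
  }

  public static void main(String[] args) {
    System.out.println(getValue("greeting", "fallback")); // hello
    System.out.println(getValue("absent", "fallback"));   // fallback
  }
}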