MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core. Contributed by Varun Saxena

(cherry picked from commit 814afa46ef)
Junping Du 2015-02-16 09:38:05 -08:00
parent fabac2ff30
commit 1d91daaae9
10 changed files with 24 additions and 31 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -58,6 +58,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6256. Removed unused private methods in o.a.h.mapreduce.Job.java.
     (Naganarasimha G R via ozawa)
 
+    MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core.
+    (Varun Saxena via junping_du)
+
   OPTIMIZATIONS
 
     MAPREDUCE-6169. MergeQueue should release reference to the current item

.../org/apache/hadoop/mapred/IndexCache.java

@@ -145,7 +145,7 @@ class IndexCache {
    */
   public void removeMap(String mapId) {
     IndexInformation info = cache.get(mapId);
-    if (info == null || ((info != null) && isUnderConstruction(info))) {
+    if (info == null || isUnderConstruction(info)) {
       return;
     }
     info = cache.remove(mapId);
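
This is the findbugs redundant-nullcheck pattern (RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE): once info == null has evaluated to false, the short-circuit || guarantees info is non-null on the right-hand side, so the extra (info != null) test can never change the outcome. A minimal standalone sketch of the same simplification; the class and map here are illustrative, not Hadoop code:

import java.util.concurrent.ConcurrentHashMap;

public class RedundantNullCheckDemo {
  private final ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<>();

  public void removeEntry(String key) {
    String value = cache.get(key);
    // Before: if (value == null || ((value != null) && value.isEmpty())) { ... }
    // The (value != null) test is dead: || evaluates its right operand only
    // when the left one is false, i.e. when value is already non-null.
    if (value == null || value.isEmpty()) {
      return;
    }
    cache.remove(key);
  }

  public static void main(String[] args) {
    RedundantNullCheckDemo demo = new RedundantNullCheckDemo();
    demo.cache.put("a", "payload");
    demo.removeEntry("a");      // entry removed
    demo.removeEntry("absent"); // no-op, no NullPointerException
    System.out.println(demo.cache); // {}
  }
}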

.../org/apache/hadoop/mapred/TaskLogAppender.java

@@ -75,7 +75,7 @@ public class TaskLogAppender extends FileAppender implements Flushable {
     if (maxEvents == null) {
       String propValue = System.getProperty(LOGSIZE_PROPERTY, "0");
-      setTotalLogFileSize(Long.valueOf(propValue));
+      setTotalLogFileSize(Long.parseLong(propValue));
     }
   }
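
The warning here is needless boxing: Long.valueOf(String) allocates a java.lang.Long that is immediately auto-unboxed into the primitive long parameter, while Long.parseLong parses straight to the primitive. A small illustrative sketch; setSize and the demo.logsize property are stand-ins for setTotalLogFileSize and LOGSIZE_PROPERTY:

public class ParseVsValueOfDemo {
  // Takes a primitive, like TaskLogAppender#setTotalLogFileSize(long).
  static void setSize(long size) {
    System.out.println("size = " + size);
  }

  public static void main(String[] args) {
    String propValue = System.getProperty("demo.logsize", "0");
    // Long.valueOf returns a boxed java.lang.Long that is immediately
    // auto-unboxed to long: a pointless allocation findbugs flags.
    setSize(Long.valueOf(propValue));
    // Long.parseLong yields the primitive directly; same result, no box.
    setSize(Long.parseLong(propValue));
  }
}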

.../org/apache/hadoop/mapred/lib/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -49,9 +47,7 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;
 
   protected int idx;
   protected long progress;
@@ -106,7 +102,6 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
       throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;
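
Both deleted fields are findbugs URF_UNREAD_FIELD hits: the rrClass constructor parameter is still needed to resolve rrConstructor, but the field copy of it was never read afterwards, and fs was never read at all. A compact before/after sketch of the pattern, with illustrative names rather than the Hadoop class:

public class UnreadFieldDemo {
  static class Before {
    private final Class<?> rrClass; // stored but never read again: URF_UNREAD_FIELD
    private final java.lang.reflect.Constructor<?> rrConstructor;

    Before(Class<?> rrClass) throws NoSuchMethodException {
      this.rrClass = rrClass; // redundant copy of the parameter
      this.rrConstructor = rrClass.getDeclaredConstructor();
    }
  }

  static class After {
    private final java.lang.reflect.Constructor<?> rrConstructor;

    After(Class<?> rrClass) throws NoSuchMethodException {
      // The parameter is consumed here; no field copy is kept.
      this.rrConstructor = rrClass.getDeclaredConstructor();
    }
  }

  public static void main(String[] args) throws Exception {
    Object o = new After(StringBuilder.class).rrConstructor.newInstance();
    System.out.println(o.getClass().getSimpleName()); // StringBuilder
  }
}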

.../org/apache/hadoop/mapreduce/JobSubmitter.java

@@ -390,10 +390,12 @@ class JobSubmitter {
     short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
     copyAndConfigureFiles(job, jobSubmitDir, replication);
 
-    // Set the working directory
-    if (job.getWorkingDirectory() == null) {
-      job.setWorkingDirectory(jtFs.getWorkingDirectory());
-    }
+    // Get the working directory. If not set, sets it to filesystem working dir
+    // This code has been added so that working directory reset before running
+    // the job. This is necessary for backward compatibility as other systems
+    // might use the public API JobConf#setWorkingDirectory to reset the working
+    // directory.
+    job.getWorkingDirectory();
   }
 
   /**
@@ -773,11 +775,6 @@ class JobSubmitter {
       if (!log4jPropertyFile.isEmpty()) {
         short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
         copyLog4jPropertyFile(job, jobSubmitDir, replication);
-
-        // Set the working directory
-        if (job.getWorkingDirectory() == null) {
-          job.setWorkingDirectory(jtFs.getWorkingDirectory());
-        }
       }
     }
   }
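
The removed blocks can be replaced by a bare getter call because, as the new comment says, JobConf#getWorkingDirectory is self-initializing: if no working directory has been set, it computes one, stores it in the configuration, and returns it. A minimal sketch of that lazy-initialization idea, with System.getProperty("user.dir") standing in for the jtFs.getWorkingDirectory() fallback:

public class LazyWorkingDirDemo {
  private String workingDir; // null until first requested

  // Mirrors the getter-with-side-effect contract: reading the value
  // also sets it if it was never configured explicitly.
  public String getWorkingDirectory() {
    if (workingDir == null) {
      workingDir = System.getProperty("user.dir"); // stand-in for jtFs.getWorkingDirectory()
    }
    return workingDir;
  }

  public void setWorkingDirectory(String dir) {
    this.workingDir = dir;
  }

  public static void main(String[] args) {
    LazyWorkingDirDemo conf = new LazyWorkingDirDemo();
    conf.getWorkingDirectory(); // bare call, as in the patch: forces initialization
    System.out.println(conf.getWorkingDirectory());
  }
}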

.../org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java

@@ -90,7 +90,7 @@ public class FieldSelectionHelper {
     }
     pos = fieldSpec.indexOf('-');
     if (pos < 0) {
-      Integer fn = new Integer(fieldSpec);
+      Integer fn = Integer.valueOf(fieldSpec);
       fieldList.add(fn);
     } else {
       String start = fieldSpec.substring(0, pos);
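
findbugs flags new Integer(...) (DM_NUMBER_CTOR) because the constructor always allocates, whereas Integer.valueOf may hand back a cached instance (the JDK caches at least -128..127); the constructor form has also been deprecated since Java 9. A tiny demo; boxed values should still be compared with equals, never ==:

public class IntegerValueOfDemo {
  public static void main(String[] args) {
    // new Integer("7") always allocates a fresh object.
    // Integer.valueOf("7") may return a cached instance, which is
    // why findbugs (DM_NUMBER_CTOR) prefers it.
    Integer a = Integer.valueOf("7");
    Integer b = Integer.valueOf("7");
    System.out.println(a.equals(b)); // true: compare boxed values with equals
  }
}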

.../org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.lib.input;
 import java.io.*;
 import java.lang.reflect.*;
 
-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,9 +44,7 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
                                              Integer.class};
   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;
 
   protected int idx;
@@ -111,7 +107,6 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
       throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;
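
This is the same cleanup as in the mapred.lib copy above. What survives is the Constructor handle, which is all the reader needs to build a delegate per chunk; the Class token is consumed once to resolve it. A hedged sketch of that look-up-once, invoke-many pattern, using StringBuilder as a purely illustrative target:

import java.lang.reflect.Constructor;

public class ConstructorHandleDemo {
  public static void main(String[] args) throws Exception {
    // Resolve the constructor once from the class token...
    Constructor<StringBuilder> ctor =
        StringBuilder.class.getConstructor(String.class);
    // ...then invoke it repeatedly; the Class object itself is no longer needed.
    for (String chunk : new String[] {"split-0", "split-1"}) {
      StringBuilder sb = ctor.newInstance(chunk);
      System.out.println(sb.reverse());
    }
  }
}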

.../org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.mapreduce.security;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URL;
 
-
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
@@ -141,10 +141,15 @@ public class SecureShuffleUtils {
    */
   public static String toHex(byte[] ba) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for (byte b : ba) {
-      ps.printf("%x", b);
+    String strHex = "";
+    try {
+      PrintStream ps = new PrintStream(baos, false, "UTF-8");
+      for (byte b : ba) {
+        ps.printf("%x", b);
+      }
+      strHex = baos.toString("UTF-8");
+    } catch (UnsupportedEncodingException e) {
     }
-    return baos.toString();
+    return strHex;
   }
 }
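
The underlying warning is reliance on the platform default charset (findbugs DM_DEFAULT_ENCODING): new PrintStream(baos) and baos.toString() both encode with whatever the JVM default happens to be. A standalone method in the shape of the patched toHex; names are illustrative, and the explicit flush() is extra caution rather than part of the patch:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class ExplicitCharsetDemo {
  // Hex-encodes bytes without depending on the platform default charset.
  static String toHex(byte[] ba) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    String strHex = "";
    try {
      PrintStream ps = new PrintStream(baos, false, "UTF-8");
      for (byte b : ba) {
        ps.printf("%x", b);
      }
      ps.flush();
      strHex = baos.toString("UTF-8");
    } catch (UnsupportedEncodingException e) {
      // unreachable in practice: UTF-8 is a required charset on every JVM
    }
    return strHex;
  }

  public static void main(String[] args) throws Exception {
    System.out.println(toHex("abc".getBytes("UTF-8"))); // 616263
  }
}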

.../org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java

@@ -79,10 +79,8 @@ public class InMemoryReader<K, V> extends Reader<K, V> {
       File dumpFile = new File("../output/" + taskAttemptId + ".dump");
       System.err.println("Dumping corrupt map-output of " + taskAttemptId +
                          " to " + dumpFile.getAbsolutePath());
-      try {
-        FileOutputStream fos = new FileOutputStream(dumpFile);
+      try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
         fos.write(buffer, 0, bufferSize);
-        fos.close();
       } catch (IOException ioe) {
         System.err.println("Failed to dump map-output of " + taskAttemptId);
       }
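
Converting the manually closed stream to try-with-resources fixes a leak findbugs spots (OS_OPEN_STREAM): the old code never closed the FileOutputStream when write threw, since close() sat on the success path only. A self-contained sketch of the fixed shape, writing a throwaway file to the system temp directory:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public class TryWithResourcesDemo {
  public static void main(String[] args) {
    byte[] buffer = {0x48, 0x69}; // illustrative payload
    File dumpFile = new File(System.getProperty("java.io.tmpdir"), "demo.dump");
    // try-with-resources closes fos on every path, including when
    // write() throws; the manual close() version leaked in that case.
    try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
      fos.write(buffer, 0, buffer.length);
    } catch (IOException ioe) {
      System.err.println("Failed to dump: " + ioe.getMessage());
    }
    System.out.println("Wrote " + dumpFile.getAbsolutePath());
  }
}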

.../org/apache/hadoop/mapreduce/util/ResourceBundles.java

@@ -59,7 +59,7 @@ public class ResourceBundles {
     catch (Exception e) {
       return defaultValue;
     }
-    return value == null ? defaultValue : value;
+    return value;
   }
 
   private static String getLookupKey(String key, String suffix) {
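
The ternary was dead code: ResourceBundle.getObject never returns null, since a missing key raises MissingResourceException, which the catch above already maps to the default. A runnable sketch with a tiny in-memory bundle standing in for the real counter-name bundles:

import java.util.ListResourceBundle;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

public class NonNullGetObjectDemo {
  // A tiny in-memory bundle standing in for the counter-name bundles.
  static final ResourceBundle BUNDLE = new ListResourceBundle() {
    @Override
    protected Object[][] getContents() {
      return new Object[][] {{"counter.name", "Bytes Read"}};
    }
  };

  static Object getValue(String key, Object defaultValue) {
    Object value;
    try {
      // getObject never returns null: a missing key raises
      // MissingResourceException, which is mapped to the default below.
      value = BUNDLE.getObject(key);
    } catch (MissingResourceException e) {
      return defaultValue;
    }
    return value; // no null-check needed here
  }

  public static void main(String[] args) {
    System.out.println(getValue("counter.name", "fallback")); // Bytes Read
    System.out.println(getValue("absent.key", "fallback"));   // fallback
  }
}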