HADOOP-11821. Fix findbugs warnings in hadoop-sls. Contributed by Brahma Reddy Battula.

(cherry picked from commit f384a063a6)
Author: Akira Ajisaka
Date:   2015-04-30 19:34:41 +09:00
Commit: 47ba08a119
Parent: 2f2d0aa990
6 changed files with 89 additions and 75 deletions

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -124,6 +124,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11881. test-patch.sh javac result is wildly wrong (Kengo Seki via
     aw)
 
+    HADOOP-11821. Fix findbugs warnings in hadoop-sls.
+    (Brahma Reddy Battula via aajisaka)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java

@@ -17,22 +17,12 @@
  */
 package org.apache.hadoop.yarn.sls;
 
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
-import org.apache.hadoop.yarn.sls.utils.SLSUtils;
-
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
 import java.util.ArrayList;
@@ -44,6 +34,17 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.sls.utils.SLSUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
+
 @Private
 @Unstable
 public class RumenToSLSConverter {
@@ -119,10 +120,10 @@ public class RumenToSLSConverter {
 
   private static void generateSLSLoadFile(String inputFile, String outputFile)
       throws IOException {
-    Reader input = new FileReader(inputFile);
-    try {
-      Writer output = new FileWriter(outputFile);
-      try {
+    try (Reader input =
+        new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
+      try (Writer output =
+          new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
         ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
@@ -131,19 +132,15 @@ public class RumenToSLSConverter {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
         }
-      } finally {
-        output.close();
       }
-    } finally {
-      input.close();
     }
   }
 
   @SuppressWarnings("unchecked")
   private static void generateSLSNodeFile(String outputFile)
       throws IOException {
-    Writer output = new FileWriter(outputFile);
-    try {
+    try (Writer output =
+        new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
       ObjectMapper mapper = new ObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
@@ -158,8 +155,6 @@ public class RumenToSLSConverter {
         rack.put("nodes", nodes);
         output.write(writer.writeValueAsString(rack) + EOL);
       }
-    } finally {
-      output.close();
     }
   }
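
The change above is the fix this patch applies throughout hadoop-sls: FileReader and FileWriter silently use the platform default charset (findbugs' DM_DEFAULT_ENCODING warning), so they are replaced by the stream classes wrapped with an explicit "UTF-8" charset, and the nested try/finally close logic collapses into try-with-resources. A minimal self-contained sketch of the pattern (class and method names here are illustrative, not from the patch):

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;

public class Utf8CopyExample {
  // Copies a text file, decoding and encoding as UTF-8 regardless of the
  // platform default charset.
  public static void copy(String inFile, String outFile) throws IOException {
    // try-with-resources closes both streams on every exit path, replacing
    // the nested try { ... } finally { close(); } blocks removed above.
    try (Reader input =
             new InputStreamReader(new FileInputStream(inFile), "UTF-8");
         Writer output =
             new OutputStreamWriter(new FileOutputStream(outFile), "UTF-8")) {
      char[] buf = new char[4096];
      int n;
      while ((n = input.read(buf)) != -1) {
        output.write(buf, 0, n);
      }
    }
  }
}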

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java

@@ -18,48 +18,48 @@
 package org.apache.hadoop.yarn.sls;
 
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.text.MessageFormat;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.List;
 import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Random;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
 
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
+import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.sls.appmaster.AMSimulator;
 import org.apache.hadoop.yarn.sls.conf.SLSConfiguration;
 import org.apache.hadoop.yarn.sls.nodemanager.NMSimulator;
 import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator;
 import org.apache.hadoop.yarn.sls.scheduler.ResourceSchedulerWrapper;
 import org.apache.hadoop.yarn.sls.scheduler.TaskRunner;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 import org.apache.log4j.Logger;
 import org.codehaus.jackson.JsonFactory;
@@ -263,7 +263,8 @@ public class SLSRunner {
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
     for (String inputTrace : inputTraces) {
-      Reader input = new FileReader(inputTrace);
+      Reader input =
+          new InputStreamReader(new FileInputStream(inputTrace), "UTF-8");
       try {
         Iterator<Map> i = mapper.readValues(jsonF.createJsonParser(input),
             Map.class);
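
Here the reader is built before the pre-existing try block rather than inside try-with-resources, so whatever close handling follows (outside this hunk) stays as it was. On Java 7+ an alternative to the "UTF-8" string literal is the StandardCharsets.UTF_8 constant, which removes the checked UnsupportedEncodingException path entirely; the patch keeps the string form, but a hypothetical Charset-based variant would look like this:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public class TraceReaderExample {
  // Opens a trace file as UTF-8. Passing a Charset object instead of the
  // charset name "UTF-8" cannot throw UnsupportedEncodingException.
  public static Reader open(String inputTrace) throws IOException {
    return new InputStreamReader(
        new FileInputStream(inputTrace), StandardCharsets.UTF_8);
  }
}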

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java

@@ -19,8 +19,9 @@ package org.apache.hadoop.yarn.sls.scheduler;
 
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -488,8 +489,9 @@ public class ResourceSchedulerWrapper
           TimeUnit.MILLISECONDS);
 
       // application running information
-      jobRuntimeLogBW = new BufferedWriter(
-          new FileWriter(metricsOutputDir + "/jobruntime.csv"));
+      jobRuntimeLogBW =
+          new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+              metricsOutputDir + "/jobruntime.csv"), "UTF-8"));
       jobRuntimeLogBW.write("JobID,real_start_time,real_end_time," +
           "simulate_start_time,simulate_end_time" + EOL);
       jobRuntimeLogBW.flush();
@@ -693,8 +695,9 @@ public class ResourceSchedulerWrapper
     private boolean firstLine = true;
 
     public MetricsLogRunnable() {
       try {
-        metricsLogBW = new BufferedWriter(
-            new FileWriter(metricsOutputDir + "/realtimetrack.json"));
+        metricsLogBW =
+            new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+                metricsOutputDir + "/realtimetrack.json"), "UTF-8"));
         metricsLogBW.write("[");
       } catch (IOException e) {
         e.printStackTrace();
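
Both hunks build the same three-layer writer: a raw FileOutputStream at the bottom, an OutputStreamWriter that pins the encoding to UTF-8, and a BufferedWriter on top so that per-line log writes are batched rather than hitting the file system one call at a time. A sketch of that layering with an illustrative helper (names and the main method are not from the patch):

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;

public class LogWriterExample {
  // Layering, top to bottom: buffering -> charset encoding -> raw file bytes.
  public static BufferedWriter openUtf8Log(String path) throws IOException {
    return new BufferedWriter(
        new OutputStreamWriter(new FileOutputStream(path), "UTF-8"));
  }

  public static void main(String[] args) throws IOException {
    try (BufferedWriter bw = openUtf8Log("jobruntime.csv")) {
      bw.write("JobID,real_start_time,real_end_time");
      bw.newLine();
    }
  }
}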

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java

@@ -17,6 +17,17 @@
  */
 package org.apache.hadoop.yarn.sls.utils;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -28,16 +39,6 @@ import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
 
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.List;
-import java.util.Iterator;
-
 @Private
 @Unstable
 public class SLSUtils {
@@ -89,7 +90,8 @@ public class SLSUtils {
     Set<String> nodeSet = new HashSet<String>();
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
-    Reader input = new FileReader(jobTrace);
+    Reader input =
+        new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
     try {
       Iterator<Map> i = mapper.readValues(
           jsonF.createJsonParser(input), Map.class);
@@ -116,7 +118,8 @@ public class SLSUtils {
     Set<String> nodeSet = new HashSet<String>();
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
-    Reader input = new FileReader(nodeFile);
+    Reader input =
+        new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
     try {
       Iterator<Map> i = mapper.readValues(
           jsonF.createJsonParser(input), Map.class);
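
SLSUtils keeps its explicit try/finally and only swaps the FileReader for a UTF-8 InputStreamReader. For context, mapper.readValues(...) returns an iterator that streams one top-level JSON object at a time from the trace file, which is why the reader must stay open while the iterator is consumed. A condensed sketch of the whole read loop, using the same Jackson 1.x calls as the file (the method name is illustrative):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;

public class NodeSetExample {
  public static void printJobs(String jobTrace) throws IOException {
    JsonFactory jsonF = new JsonFactory();
    ObjectMapper mapper = new ObjectMapper();
    Reader input =
        new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
    try {
      // Each call to next() parses the next top-level JSON object.
      Iterator<Map> i = mapper.readValues(
          jsonF.createJsonParser(input), Map.class);
      while (i.hasNext()) {
        System.out.println(i.next().keySet());
      }
    } finally {
      input.close();  // mirrors the explicit close the class retains
    }
  }
}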

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.sls.web;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.ObjectInputStream;
 import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.Map;
@@ -32,22 +33,21 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
-    .SchedulerEventType;
-import org.mortbay.jetty.Handler;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.handler.AbstractHandler;
-import org.mortbay.jetty.Request;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
 import org.apache.hadoop.yarn.sls.SLSRunner;
 import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.ResourceSchedulerWrapper;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerMetrics;
+import org.mortbay.jetty.Handler;
+import org.mortbay.jetty.Request;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.handler.AbstractHandler;
+import org.mortbay.jetty.handler.ResourceHandler;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.MetricRegistry;
-import org.mortbay.jetty.handler.ResourceHandler;
 
 @Private
 @Unstable
@@ -69,9 +69,10 @@ public class SLSWebApp extends HttpServlet {
   private transient Gauge availableVCoresGauge;
   private transient Histogram allocateTimecostHistogram;
   private transient Histogram handleTimecostHistogram;
-  private Map<SchedulerEventType, Histogram> handleOperTimecostHistogramMap;
-  private Map<String, Counter> queueAllocatedMemoryCounterMap;
-  private Map<String, Counter> queueAllocatedVCoresCounterMap;
+  private transient Map<SchedulerEventType, Histogram>
+      handleOperTimecostHistogramMap;
+  private transient Map<String, Counter> queueAllocatedMemoryCounterMap;
+  private transient Map<String, Counter> queueAllocatedVCoresCounterMap;
   private int port;
   private int ajaxUpdateTimeMS = 1000;
   // html page templates
@@ -94,6 +95,14 @@
     }
   }
 
+  private void readObject(ObjectInputStream in) throws IOException,
+      ClassNotFoundException {
+    in.defaultReadObject();
+    handleOperTimecostHistogramMap = new HashMap<>();
+    queueAllocatedMemoryCounterMap = new HashMap<>();
+    queueAllocatedVCoresCounterMap = new HashMap<>();
+  }
+
   public SLSWebApp(ResourceSchedulerWrapper wrapper, int metricsAddressPort) {
     this.wrapper = wrapper;
     metrics = wrapper.getMetrics();
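
SLSWebApp extends HttpServlet, which is Serializable, so findbugs flags non-transient fields of non-serializable types (the SE_BAD_FIELD family of warnings). The fix marks the metric maps transient and re-creates them in a readObject hook, because transient fields come back null after default deserialization. The idiom in isolation, with illustrative names:

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

public class TransientMapExample implements Serializable {
  private static final long serialVersionUID = 1L;

  private String name;                        // serialized as usual
  private transient Map<String, Long> cache;  // skipped by serialization

  public TransientMapExample(String name) {
    this.name = name;
    this.cache = new HashMap<>();
  }

  // Called by ObjectInputStream during deserialization; restores the
  // transient field that defaultReadObject() leaves null.
  private void readObject(ObjectInputStream in)
      throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    cache = new HashMap<>();
  }
}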