HADOOP-11821. Fix findbugs warnings in hadoop-sls. Contributed by Brahma Reddy Battula.

commit f384a063a6
parent e89fc53a1d
Akira Ajisaka  2015-04-30 19:34:41 +09:00
7 changed files with 138 additions and 148 deletions
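Most of the hunks below follow one pattern: FileReader/FileWriter, which silently pick up the JVM's platform default charset (the usual trigger for findbugs' "reliance on default encoding" warnings), are replaced by stream readers/writers constructed with an explicit "UTF-8" charset. A minimal standalone sketch of that pattern, not part of the patch; the file names here are placeholders:

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;

public class ExplicitCharsetIO {
  public static void main(String[] args) throws IOException {
    // "trace.json" / "out.json" are placeholder names, not paths from the patch.
    // Before (flagged): Reader in = new FileReader("trace.json");
    // the platform default charset is used implicitly.
    Reader in =
        new InputStreamReader(new FileInputStream("trace.json"), "UTF-8");
    // Before (flagged): Writer out = new FileWriter("out.json");
    Writer out =
        new OutputStreamWriter(new FileOutputStream("out.json"), "UTF-8");
    try {
      int c;
      while ((c = in.read()) != -1) {
        out.write(c);
      }
    } finally {
      out.close();
      in.close();
    }
  }
}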

CHANGES.txt

@@ -579,6 +579,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11881. test-patch.sh javac result is wildly wrong (Kengo Seki via
     aw)

+    HADOOP-11821. Fix findbugs warnings in hadoop-sls.
+    (Brahma Reddy Battula via aajisaka)
+
 Release 2.7.1 - UNRELEASED

   INCOMPATIBLE CHANGES

RumenToSLSConverter.java

@@ -17,22 +17,12 @@
  */
 package org.apache.hadoop.yarn.sls;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
-import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
 import java.util.ArrayList;
@@ -44,6 +34,17 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.sls.utils.SLSUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;

 @Private
 @Unstable
 public class RumenToSLSConverter {
@@ -119,10 +120,10 @@ public class RumenToSLSConverter {
   private static void generateSLSLoadFile(String inputFile, String outputFile)
       throws IOException {
-    Reader input = new FileReader(inputFile);
-    try {
-      Writer output = new FileWriter(outputFile);
-      try {
+    try (Reader input =
+        new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
+      try (Writer output =
+          new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
         ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
@@ -131,19 +132,15 @@ public class RumenToSLSConverter {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
         }
-      } finally {
-        output.close();
       }
-    } finally {
-      input.close();
     }
   }

   @SuppressWarnings("unchecked")
   private static void generateSLSNodeFile(String outputFile)
       throws IOException {
-    Writer output = new FileWriter(outputFile);
-    try {
+    try (Writer output =
+        new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
       ObjectMapper mapper = new ObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
@@ -158,8 +155,6 @@
         rack.put("nodes", nodes);
         output.write(writer.writeValueAsString(rack) + EOL);
       }
-    } finally {
-      output.close();
     }
   }
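The converter hunks above also replace the hand-rolled try/finally close() nesting with try-with-resources, so both streams are closed automatically (inner writer first) even when an exception escapes. A reduced sketch of the same shape, not taken from the patch; the file names are placeholders:

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;

final class TryWithResourcesSketch {
  static void copy() throws IOException {
    // "in.json" and "out.json" are placeholders, not paths from the patch.
    try (Reader input =
        new InputStreamReader(new FileInputStream("in.json"), "UTF-8")) {
      try (Writer output =
          new OutputStreamWriter(new FileOutputStream("out.json"), "UTF-8")) {
        // ... write converted records; both streams are closed automatically,
        // output first and then input, even if an exception escapes this block.
        output.write("[]");
      }
    }
  }
}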

SLSRunner.java

@@ -18,51 +18,51 @@
 package org.apache.hadoop.yarn.sls;
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.text.MessageFormat;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.List;
 import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Random;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.sls.appmaster.AMSimulator;
 import org.apache.hadoop.yarn.sls.conf.SLSConfiguration;
 import org.apache.hadoop.yarn.sls.nodemanager.NMSimulator;
 import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator;
 import org.apache.hadoop.yarn.sls.scheduler.ResourceSchedulerWrapper;
 import org.apache.hadoop.yarn.sls.scheduler.SLSCapacityScheduler;
+import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
 import org.apache.hadoop.yarn.sls.scheduler.TaskRunner;
-import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 import org.apache.log4j.Logger;
 import org.codehaus.jackson.JsonFactory;
@@ -277,7 +277,8 @@ public class SLSRunner {
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
     for (String inputTrace : inputTraces) {
-      Reader input = new FileReader(inputTrace);
+      Reader input =
+          new InputStreamReader(new FileInputStream(inputTrace), "UTF-8");
       try {
         Iterator<Map> i = mapper.readValues(jsonF.createJsonParser(input),
             Map.class);

ResourceSchedulerWrapper.java

@@ -19,8 +19,9 @@ package org.apache.hadoop.yarn.sls.scheduler;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -487,8 +488,9 @@ final public class ResourceSchedulerWrapper
           TimeUnit.MILLISECONDS);

     // application running information
-    jobRuntimeLogBW = new BufferedWriter(
-            new FileWriter(metricsOutputDir + "/jobruntime.csv"));
+    jobRuntimeLogBW =
+        new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+            metricsOutputDir + "/jobruntime.csv"), "UTF-8"));
     jobRuntimeLogBW.write("JobID,real_start_time,real_end_time," +
             "simulate_start_time,simulate_end_time" + EOL);
     jobRuntimeLogBW.flush();
@@ -692,8 +694,9 @@ final public class ResourceSchedulerWrapper
     private boolean firstLine = true;
     public MetricsLogRunnable() {
       try {
-        metricsLogBW = new BufferedWriter(
-                new FileWriter(metricsOutputDir + "/realtimetrack.json"));
+        metricsLogBW =
+            new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+                metricsOutputDir + "/realtimetrack.json"), "UTF-8"));
         metricsLogBW.write("[");
       } catch (IOException e) {
         e.printStackTrace();
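For the metric logs, the BufferedWriter is kept but now wraps an OutputStreamWriter with an explicit charset instead of a FileWriter. A sketch of the writer chain, not part of the patch; the directory parameter and method name are illustrative only:

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;

final class MetricsLogWriterSketch {
  // "metricsDir" is a placeholder for the simulator's metrics output directory.
  static BufferedWriter openJobRuntimeLog(String metricsDir) throws IOException {
    // FileOutputStream -> OutputStreamWriter("UTF-8") -> BufferedWriter:
    // raw bytes, then characters in a named charset, then buffering on top.
    return new BufferedWriter(new OutputStreamWriter(
        new FileOutputStream(metricsDir + "/jobruntime.csv"), "UTF-8"));
  }
}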

SLSCapacityScheduler.java

@@ -17,76 +17,11 @@
  */
 package org.apache.hadoop.yarn.sls.scheduler;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.util.ShutdownHookManager;
-import org.apache.hadoop.yarn.sls.SLSRunner;
-import org.apache.hadoop.yarn.sls.conf.SLSConfiguration;
-import org.apache.hadoop.yarn.sls.web.SLSWebApp;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.CsvReporter;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.SlidingWindowReservoir;
-import com.codahale.metrics.Timer;
-import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.QueueInfo;
-import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
-import org.apache.hadoop.yarn.api.records.QueueACL;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.ResourceRequest;
-import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
-import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
-import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
-import org.apache.hadoop.yarn.server.resourcemanager.rmnode
-    .UpdatedContainerInfo;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.PreemptableResourceScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler
-    .ResourceScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler
-    .SchedulerAppReport;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler
-    .SchedulerNodeReport;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity
-    .CapacityScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
-    .NodeUpdateSchedulerEvent;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
-    .SchedulerEvent;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
-    .SchedulerEventType;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair
-    .FairScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo
-    .FifoScheduler;
-import org.apache.hadoop.yarn.util.resource.Resources;
-import org.apache.log4j.Logger;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -102,6 +37,47 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+import org.apache.hadoop.yarn.sls.SLSRunner;
+import org.apache.hadoop.yarn.sls.conf.SLSConfiguration;
+import org.apache.hadoop.yarn.sls.web.SLSWebApp;
+import org.apache.hadoop.yarn.util.resource.Resources;
+import org.apache.log4j.Logger;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.CsvReporter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.SlidingWindowReservoir;
+import com.codahale.metrics.Timer;

 @Private
 @Unstable
 public class SLSCapacityScheduler extends CapacityScheduler implements
@@ -490,8 +466,9 @@ public class SLSCapacityScheduler extends CapacityScheduler implements
           TimeUnit.MILLISECONDS);

     // application running information
-    jobRuntimeLogBW = new BufferedWriter(
-            new FileWriter(metricsOutputDir + "/jobruntime.csv"));
+    jobRuntimeLogBW =
+        new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+            metricsOutputDir + "/jobruntime.csv"), "UTF-8"));
     jobRuntimeLogBW.write("JobID,real_start_time,real_end_time," +
             "simulate_start_time,simulate_end_time" + EOL);
     jobRuntimeLogBW.flush();
@@ -695,8 +672,9 @@ public class SLSCapacityScheduler extends CapacityScheduler implements
     private boolean firstLine = true;
     public MetricsLogRunnable() {
       try {
-        metricsLogBW = new BufferedWriter(
-                new FileWriter(metricsOutputDir + "/realtimetrack.json"));
+        metricsLogBW =
+            new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+                metricsOutputDir + "/realtimetrack.json"), "UTF-8"));
         metricsLogBW.write("[");
       } catch (IOException e) {
         e.printStackTrace();

SLSUtils.java

@@ -17,6 +17,17 @@
  */
 package org.apache.hadoop.yarn.sls.utils;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -28,16 +39,6 @@ import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.List;
-import java.util.Iterator;

 @Private
 @Unstable
 public class SLSUtils {
@@ -89,7 +90,8 @@ public class SLSUtils {
     Set<String> nodeSet = new HashSet<String>();
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
-    Reader input = new FileReader(jobTrace);
+    Reader input =
+        new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
     try {
       Iterator<Map> i = mapper.readValues(
           jsonF.createJsonParser(input), Map.class);
@@ -116,7 +118,8 @@ public class SLSUtils {
     Set<String> nodeSet = new HashSet<String>();
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
-    Reader input = new FileReader(nodeFile);
+    Reader input =
+        new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
     try {
       Iterator<Map> i = mapper.readValues(
           jsonF.createJsonParser(input), Map.class);
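SLSUtils keeps its Jackson 1 (org.codehaus.jackson) streaming loop and only swaps the underlying reader. A self-contained sketch of reading a trace file of concatenated JSON objects the same way, not part of the patch; the class and method names are placeholders:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;

final class TraceReaderSketch {
  static void dumpRecords(String jobTrace) throws IOException {
    JsonFactory jsonF = new JsonFactory();
    ObjectMapper mapper = new ObjectMapper();
    Reader input =
        new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
    try {
      // readValues(...) yields one Map per top-level JSON object in the file.
      Iterator<Map> i = mapper.readValues(
          jsonF.createJsonParser(input), Map.class);
      while (i.hasNext()) {
        Map record = i.next();
        System.out.println(record.keySet());
      }
    } finally {
      input.close();
    }
  }
}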

SLSWebApp.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.sls.web;
 import java.io.File;
 import java.io.IOException;
+import java.io.ObjectInputStream;
 import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.Map;
@@ -32,24 +33,21 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
-    .SchedulerEventType;
-import org.mortbay.jetty.Handler;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.handler.AbstractHandler;
-import org.mortbay.jetty.Request;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
 import org.apache.hadoop.yarn.sls.SLSRunner;
 import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
-import org.apache.hadoop.yarn.sls.scheduler.ResourceSchedulerWrapper;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
+import org.mortbay.jetty.Handler;
+import org.mortbay.jetty.Request;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.handler.AbstractHandler;
+import org.mortbay.jetty.handler.ResourceHandler;

 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.MetricRegistry;
-import org.mortbay.jetty.handler.ResourceHandler;

 @Private
 @Unstable
@@ -71,9 +69,10 @@ public class SLSWebApp extends HttpServlet {
   private transient Gauge availableVCoresGauge;
   private transient Histogram allocateTimecostHistogram;
   private transient Histogram handleTimecostHistogram;
-  private Map<SchedulerEventType, Histogram> handleOperTimecostHistogramMap;
-  private Map<String, Counter> queueAllocatedMemoryCounterMap;
-  private Map<String, Counter> queueAllocatedVCoresCounterMap;
+  private transient Map<SchedulerEventType, Histogram>
+      handleOperTimecostHistogramMap;
+  private transient Map<String, Counter> queueAllocatedMemoryCounterMap;
+  private transient Map<String, Counter> queueAllocatedVCoresCounterMap;
   private int port;
   private int ajaxUpdateTimeMS = 1000;
   // html page templates
@@ -96,6 +95,14 @@ public class SLSWebApp extends HttpServlet {
     }
   }

+  private void readObject(ObjectInputStream in) throws IOException,
+      ClassNotFoundException {
+    in.defaultReadObject();
+    handleOperTimecostHistogramMap = new HashMap<>();
+    queueAllocatedMemoryCounterMap = new HashMap<>();
+    queueAllocatedVCoresCounterMap = new HashMap<>();
+  }
+
   public SLSWebApp(SchedulerWrapper wrapper, int metricsAddressPort) {
     this.wrapper = wrapper;
     metrics = wrapper.getMetrics();
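The SLSWebApp hunks address the serialization side: HttpServlet is Serializable, so the metric maps are marked transient (findbugs otherwise flags non-serializable fields in a serializable class) and rebuilt in readObject() so they are never left null after deserialization. A minimal sketch of that pattern, not part of the patch, with placeholder names:

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

class TransientStateSketch implements Serializable {
  private static final long serialVersionUID = 1L;

  // Marked transient so serialization skips it; it is rebuilt on demand.
  private transient Map<String, Long> counters = new HashMap<>();

  // Called by the serialization machinery after the non-transient fields
  // are read back; re-create the transient state here.
  private void readObject(ObjectInputStream in)
      throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    counters = new HashMap<>();
  }
}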