diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SLSCapacityScheduler.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SLSCapacityScheduler.java
index 72807de0b9e..efb1716f70a 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SLSCapacityScheduler.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SLSCapacityScheduler.java
@@ -84,8 +84,10 @@
 
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -488,7 +490,8 @@ private void initMetrics() throws Exception {
 
     // application running information
     jobRuntimeLogBW = new BufferedWriter(
-        new FileWriter(metricsOutputDir + "/jobruntime.csv"));
+        new OutputStreamWriter(new FileOutputStream(
+            metricsOutputDir + "/jobruntime.csv"), StandardCharsets.UTF_8));
     jobRuntimeLogBW.write("JobID,real_start_time,real_end_time," +
         "simulate_start_time,simulate_end_time" + EOL);
     jobRuntimeLogBW.flush();
@@ -693,7 +696,9 @@ class MetricsLogRunnable implements Runnable {
     public MetricsLogRunnable() {
       try {
         metricsLogBW = new BufferedWriter(
-            new FileWriter(metricsOutputDir + "/realtimetrack.json"));
+            new OutputStreamWriter(new FileOutputStream(
+                metricsOutputDir + "/realtimetrack.json"),
+                StandardCharsets.UTF_8));
         metricsLogBW.write("[");
       } catch (IOException e) {
         e.printStackTrace();
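
For reference, a minimal standalone sketch of the pattern this patch applies: replacing a default-charset FileWriter with an OutputStreamWriter over a FileOutputStream so the output encoding is always UTF-8 regardless of the platform's default charset. The output path metrics-example.csv and the class name are hypothetical, not part of the patch.

    import java.io.BufferedWriter;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.nio.charset.StandardCharsets;

    public class Utf8WriterExample {
      public static void main(String[] args) throws IOException {
        // new FileWriter(path) uses the JVM default charset, which varies by
        // platform; wrapping a FileOutputStream in an OutputStreamWriter with
        // an explicit charset makes the file encoding deterministic.
        try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
            new FileOutputStream("metrics-example.csv"),
            StandardCharsets.UTF_8))) {
          out.write("JobID,real_start_time,real_end_time");
          out.newLine();
        }
      }
    }

On Java 7 and later, java.nio.file.Files.newBufferedWriter(path, StandardCharsets.UTF_8) would achieve the same result more concisely; the patch keeps the stream-based form used by the surrounding code.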