HADOOP-12843. Fix findbugs warnings in hadoop-common (branch-2). (aajisaka)

Author: Akira Ajisaka
Date:   2016-03-02 00:20:57 +09:00
Parent: e402371b6a
Commit: 67e8489ac6

4 changed files with 57 additions and 65 deletions
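Judging from the diff, the FindBugs reports being fixed are DM_DEFAULT_ENCODING warnings: each generator opened its output with new FileWriter(...), which writes in the platform default charset, and closed it by hand in nested try/finally blocks. Below is a minimal sketch of the replacement pattern, assuming commons-io on the classpath; the file name example.h is illustrative, not taken from this commit.

import java.io.IOException;
import java.io.Writer;

import org.apache.commons.io.Charsets;
import org.apache.commons.io.output.FileWriterWithEncoding;

public class EncodingFixSketch {
  public static void main(String[] args) throws IOException {
    // Before: FileWriter hh = new FileWriter("example.h"); ... hh.close();
    // After: the charset is pinned to UTF-8 and close() is automatic.
    try (Writer hh = new FileWriterWithEncoding("example.h", Charsets.UTF_8)) {
      hh.write("#ifndef __EXAMPLE_H__\n");
      hh.write("#endif //__EXAMPLE_H__\n");
    } // hh.close() runs here even if a write throws
  }
}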

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -1140,6 +1140,8 @@ Release 2.8.0 - UNRELEASED
     ProviderUtils.excludeIncompatibleCredentialProviders.
     (Larry McCay via cnauroth)
 
+    HADOOP-12843. Fix findbugs warnings in hadoop-common (branch-2). (aajisaka)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java

@@ -18,11 +18,14 @@
 package org.apache.hadoop.record.compiler;
 
+import java.io.Writer;
 import java.util.ArrayList;
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
 
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.output.FileWriterWithEncoding;
+
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -43,34 +46,27 @@ class CGenerator extends CodeGenerator {
                ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
     throws IOException {
     name = new File(destDir, (new File(name)).getName()).getAbsolutePath();
-    FileWriter cc = new FileWriter(name+".c");
-    try {
-      FileWriter hh = new FileWriter(name+".h");
-      try {
-        hh.write("#ifndef __"+
-            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
-        hh.write("#define __"+
-            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
-        hh.write("#include \"recordio.h\"\n");
-        for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
-          hh.write("#include \""+iter.next().getName()+".h\"\n");
-        }
-        cc.write("#include \""+name+".h\"\n");
-        /*
-        for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
-          iter.next().genCppCode(hh, cc);
-        }
-        */
-        hh.write("#endif //"+
-            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
-      } finally {
-        hh.close();
-      }
-    } finally {
-      cc.close();
+    try (Writer cc = new FileWriterWithEncoding(name+".c", Charsets.UTF_8);
+        Writer hh = new FileWriterWithEncoding(name+".h", Charsets.UTF_8)) {
+      hh.write("#ifndef __"+
+          StringUtils.toUpperCase(name).replace('.','_')+"__\n");
+      hh.write("#define __"+
+          StringUtils.toUpperCase(name).replace('.','_')+"__\n");
+      hh.write("#include \"recordio.h\"\n");
+      for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
+        hh.write("#include \""+iter.next().getName()+".h\"\n");
+      }
+      cc.write("#include \""+name+".h\"\n");
+      /*
+      for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
+        iter.next().genCppCode(hh, cc);
+      }
+      */
+      hh.write("#endif //"+
+          StringUtils.toUpperCase(name).replace('.','_')+"__\n");
     }
   }
 }
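One detail the hunk above relies on: a single try-with-resources header can replace the two nested try/finally blocks because the resources are closed in reverse order of declaration, and each close() runs even when an earlier statement throws. A standalone sketch of that behavior; demo.c and demo.h are placeholder names, not from the commit.

import java.io.IOException;
import java.io.Writer;

import org.apache.commons.io.Charsets;
import org.apache.commons.io.output.FileWriterWithEncoding;

public class TwoWriterSketch {
  public static void main(String[] args) throws IOException {
    try (Writer cc = new FileWriterWithEncoding("demo.c", Charsets.UTF_8);
         Writer hh = new FileWriterWithEncoding("demo.h", Charsets.UTF_8)) {
      hh.write("#include \"recordio.h\"\n");
      cc.write("#include \"demo.h\"\n");
    } // hh is closed first, then cc, the reverse of declaration order
  }
}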

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java

@@ -18,11 +18,14 @@
 package org.apache.hadoop.record.compiler;
 
+import java.io.Writer;
 import java.util.ArrayList;
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
 
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.output.FileWriterWithEncoding;
+
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -44,36 +47,28 @@ class CppGenerator extends CodeGenerator {
     throws IOException {
     name = new File(destDir, (new File(name)).getName()).getAbsolutePath();
-    FileWriter cc = new FileWriter(name+".cc");
-    try {
-      FileWriter hh = new FileWriter(name+".hh");
-      try {
-        String fileName = (new File(name)).getName();
-        hh.write("#ifndef __"+
-            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
-        hh.write("#define __"+
-            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
-        hh.write("#include \"recordio.hh\"\n");
-        hh.write("#include \"recordTypeInfo.hh\"\n");
-        for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
-          hh.write("#include \""+iter.next().getName()+".hh\"\n");
-        }
-        cc.write("#include \""+fileName+".hh\"\n");
-        cc.write("#include \"utils.hh\"\n");
-        for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
-          iter.next().genCppCode(hh, cc, options);
-        }
-        hh.write("#endif //"+
-            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
-      } finally {
-        hh.close();
-      }
-    } finally {
-      cc.close();
+    try (Writer cc = new FileWriterWithEncoding(name+".cc", Charsets.UTF_8);
+        Writer hh = new FileWriterWithEncoding(name+".hh", Charsets.UTF_8)) {
+      String fileName = (new File(name)).getName();
+      hh.write("#ifndef __"+
+          StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
+      hh.write("#define __"+
+          StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
+      hh.write("#include \"recordio.hh\"\n");
+      hh.write("#include \"recordTypeInfo.hh\"\n");
+      for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
+        hh.write("#include \""+iter.next().getName()+".hh\"\n");
+      }
+      cc.write("#include \""+fileName+".hh\"\n");
+      cc.write("#include \"utils.hh\"\n");
+      for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
+        iter.next().genCppCode(hh, cc, options);
+      }
+      hh.write("#endif //"+
+          StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
     }
   }
 }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java

@@ -19,10 +19,12 @@
 package org.apache.hadoop.record.compiler;
 
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
+import java.io.Writer;
 import java.util.*;
 
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.output.FileWriterWithEncoding;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -469,11 +471,8 @@ public class JRecord extends JCompType {
       cb.append("}\n");
       cb.append("}\n");
-      FileWriter jj = new FileWriter(jfile);
-      try {
+      try (Writer jj = new FileWriterWithEncoding(jfile, Charsets.UTF_8)) {
         jj.write(cb.toString());
-      } finally {
-        jj.close();
       }
     }
   }
@@ -545,7 +544,7 @@ public class JRecord extends JCompType {
       cb.append("}\n");
     }
 
-    void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+    void genCode(Writer hh, Writer cc, ArrayList<String> options)
       throws IOException {
       CodeBuffer hb = new CodeBuffer();
@@ -810,7 +809,7 @@ public class JRecord extends JCompType {
     return signature;
   }
 
-  void genCppCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+  void genCppCode(Writer hh, Writer cc, ArrayList<String> options)
     throws IOException {
     ((CppRecord)getCppType()).genCode(hh, cc, options);
   }
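The two signature hunks above widen genCode and genCppCode from FileWriter to Writer, which is what lets the generators hand the FileWriterWithEncoding instances down the call chain. A hypothetical caller (this genCode is a stand-in, not the real JRecord method) showing that any Writer, such as a StringWriter, now satisfies the signature:

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

public class WriterSignatureSketch {
  // Stand-in for the widened genCode(Writer, Writer, ...) signature.
  static void genCode(Writer hh, Writer cc) throws IOException {
    hh.write("// header file\n");
    cc.write("// source file\n");
  }

  public static void main(String[] args) throws IOException {
    StringWriter hh = new StringWriter(); // any Writer works, not just FileWriter
    StringWriter cc = new StringWriter();
    genCode(hh, cc);
    System.out.print(hh.toString() + cc.toString());
  }
}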