HADOOP-12731. Remove useless boxing/unboxing code. Contributed by Kousuke Saruta.

(cherry picked from commit 736eb17a79)
Author: Akira Ajisaka
Date:   2016-01-25 13:47:29 +09:00
parent a9386cae6e
commit 4edd7bd820
13 changed files with 43 additions and 57 deletions
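The pattern repeated across these files: the boxed factories (Integer.valueOf, Long.valueOf, Boolean.valueOf) create a wrapper object that is immediately auto-unboxed back to a primitive, so the primitive-returning parse methods (or a plain cast/expression) do the same work without the round trip. A minimal sketch of the before/after shape (the variable names are illustrative only):

    String portStr = "8020";   // example input

    // Before: Integer.valueOf(String) allocates a boxed Integer that is
    // immediately auto-unboxed into the primitive int variable.
    int boxed = Integer.valueOf(portStr);

    // After: Integer.parseInt(String) returns the primitive int directly;
    // parsing rules and NumberFormatException behavior are identical.
    int parsed = Integer.parseInt(portStr);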


@@ -35,7 +35,7 @@ public class TestZKSignerSecretProvider {
   // rollover every 2 sec
   private final int timeout = 4000;
-  private final long rolloverFrequency = Long.valueOf(timeout / 2);
+  private final long rolloverFrequency = timeout / 2;

   @Before
   public void setup() throws Exception {


@@ -1012,6 +1012,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12730. Hadoop streaming -mapper and -reducer options are wrongly
     documented as required. (Kengo Seki via aajisaka)

+    HADOOP-12731. Remove useless boxing/unboxing code.
+    (Kousuke Saruta via aajisaka)
+
 Release 2.7.3 - UNRELEASED

   INCOMPATIBLE CHANGES


@@ -771,7 +771,7 @@ public boolean delete(final Path f, final boolean recursive)
       @Override
       public Boolean next(final AbstractFileSystem fs, final Path p)
           throws IOException, UnresolvedLinkException {
-        return Boolean.valueOf(fs.delete(p, recursive));
+        return fs.delete(p, recursive);
       }
     }.resolve(this, absF);
   }
@@ -905,7 +905,7 @@ public boolean setReplication(final Path f, final short replication)
       @Override
       public Boolean next(final AbstractFileSystem fs, final Path p)
           throws IOException, UnresolvedLinkException {
-        return Boolean.valueOf(fs.setReplication(p, replication));
+        return fs.setReplication(p, replication);
       }
     }.resolve(this, absF);
   }
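In both FileContext hunks the method is still declared to return Boolean, so the primitive boolean coming back from fs.delete(...) or fs.setReplication(...) is boxed automatically by the compiler; the explicit Boolean.valueOf(...) wrapper did nothing extra. A minimal sketch of that auto-boxing rule (the deleted helper below is illustrative, not Hadoop code):

    // Declared to return the wrapper type Boolean.
    static Boolean deleted(boolean result) {
      // Auto-boxing: the compiler effectively wraps this in Boolean.valueOf(result),
      // so writing the call by hand is redundant.
      return result;
    }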


@@ -256,10 +256,10 @@ public Args(String arg)
     }
   }

-  private Integer parseConfiggedPort(String portStr)
+  private int parseConfiggedPort(String portStr)
       throws BadFencingConfigurationException {
     try {
-      return Integer.valueOf(portStr);
+      return Integer.parseInt(portStr);
     } catch (NumberFormatException nfe) {
       throw new BadFencingConfigurationException(
           "Port number '" + portStr + "' invalid");


@@ -131,40 +131,24 @@ protected synchronized void copy(Writable other) {
   protected AbstractMapWritable() {
     this.conf = new AtomicReference<Configuration>();
-    addToMap(ArrayWritable.class,
-        Byte.valueOf(Integer.valueOf(-127).byteValue()));
-    addToMap(BooleanWritable.class,
-        Byte.valueOf(Integer.valueOf(-126).byteValue()));
-    addToMap(BytesWritable.class,
-        Byte.valueOf(Integer.valueOf(-125).byteValue()));
-    addToMap(FloatWritable.class,
-        Byte.valueOf(Integer.valueOf(-124).byteValue()));
-    addToMap(IntWritable.class,
-        Byte.valueOf(Integer.valueOf(-123).byteValue()));
-    addToMap(LongWritable.class,
-        Byte.valueOf(Integer.valueOf(-122).byteValue()));
-    addToMap(MapWritable.class,
-        Byte.valueOf(Integer.valueOf(-121).byteValue()));
-    addToMap(MD5Hash.class,
-        Byte.valueOf(Integer.valueOf(-120).byteValue()));
-    addToMap(NullWritable.class,
-        Byte.valueOf(Integer.valueOf(-119).byteValue()));
-    addToMap(ObjectWritable.class,
-        Byte.valueOf(Integer.valueOf(-118).byteValue()));
-    addToMap(SortedMapWritable.class,
-        Byte.valueOf(Integer.valueOf(-117).byteValue()));
-    addToMap(Text.class,
-        Byte.valueOf(Integer.valueOf(-116).byteValue()));
-    addToMap(TwoDArrayWritable.class,
-        Byte.valueOf(Integer.valueOf(-115).byteValue()));
+    addToMap(ArrayWritable.class, (byte)-127);
+    addToMap(BooleanWritable.class, (byte)-126);
+    addToMap(BytesWritable.class, (byte)-125);
+    addToMap(FloatWritable.class, (byte)-124);
+    addToMap(IntWritable.class, (byte)-123);
+    addToMap(LongWritable.class, (byte)-122);
+    addToMap(MapWritable.class, (byte)-121);
+    addToMap(MD5Hash.class, (byte)-120);
+    addToMap(NullWritable.class, (byte)-119);
+    addToMap(ObjectWritable.class, (byte)-118);
+    addToMap(SortedMapWritable.class, (byte)-117);
+    addToMap(Text.class, (byte)-116);
+    addToMap(TwoDArrayWritable.class, (byte)-115);

     // UTF8 is deprecated so we don't support it
-    addToMap(VIntWritable.class,
-        Byte.valueOf(Integer.valueOf(-114).byteValue()));
-    addToMap(VLongWritable.class,
-        Byte.valueOf(Integer.valueOf(-113).byteValue()));
+    addToMap(VIntWritable.class, (byte)-114);
+    addToMap(VLongWritable.class, (byte)-113);
   }

   /** @return the conf */
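Each removed pair of lines boxed an int literal, narrowed it to a byte, and boxed the byte again, only for the value to end up as a plain byte id; a (byte) cast of the literal yields the identical value (and auto-boxing covers the case where a Byte is expected). A standalone check of the equivalence (not Hadoop code):

    byte viaChain = Byte.valueOf(Integer.valueOf(-127).byteValue());
    byte viaCast = (byte) -127;
    assert viaChain == viaCast;   // both are the byte value -127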


@@ -203,9 +203,8 @@ private static void reportDuplicateEntry(final String header,
    * Integer, e.g. 4294967294 maps to -2 and 4294967295 maps to -1.
    */
   private static Integer parseId(final String idStr) {
-    Long longVal = Long.parseLong(idStr);
-    int intVal = longVal.intValue();
-    return Integer.valueOf(intVal);
+    long longVal = Long.parseLong(idStr);
+    return Integer.valueOf((int)longVal);
   }

   /**
@@ -605,8 +604,8 @@ static StaticMapping parseStaticMap(File staticMapFile)
       // We know the line is fine to parse without error checking like this
       // since it matched the regex above.
       String firstComponent = lineMatcher.group(1);
-      int remoteId = parseId(lineMatcher.group(2));
-      int localId = parseId(lineMatcher.group(3));
+      Integer remoteId = parseId(lineMatcher.group(2));
+      Integer localId = parseId(lineMatcher.group(3));
       if (firstComponent.equals("uid")) {
         uidMapping.put(localId, remoteId);
       } else {
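parseId deliberately keeps the boxed Integer return type: the narrowing (int) cast preserves the documented wrap-around (4294967294 maps to -2), and the call sites now declare Integer as well so the values go into the uid/gid maps without an extra unbox/rebox. A standalone check of the wrap-around arithmetic:

    long longVal = Long.parseLong("4294967294");
    int wrapped = (int) longVal;   // narrowing keeps the low 32 bits
    assert wrapped == -2;          // 4294967294 == 2^32 - 2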


@@ -177,7 +177,7 @@ public int compareTo(Key other) {
     }
     if (result == 0) {
-      result = Double.valueOf(this.weight - other.weight).intValue();
+      result = (int)(this.weight - other.weight);
     }
     return result;
   }
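Double.intValue() is specified as the same narrowing conversion as an (int) cast, so replacing Double.valueOf(...).intValue() with the cast changes nothing about the comparator's result; it only drops the temporary Double. For example, with illustrative weights:

    double diff = 2.5 - 1.0;                          // 1.5
    int viaBoxed = Double.valueOf(diff).intValue();   // truncates toward zero -> 1
    int viaCast = (int) diff;                         // same narrowing conversion -> 1
    assert viaBoxed == viaCast;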


@@ -44,7 +44,7 @@ public class TestSshFenceByTcpPort {
   private static final InetSocketAddress TEST_ADDR =
       new InetSocketAddress(TEST_FENCING_HOST,
-          Integer.valueOf(TEST_FENCING_PORT));
+          Integer.parseInt(TEST_FENCING_PORT));

   private static final HAServiceTarget TEST_TARGET =
       new DummyHAService(HAServiceState.ACTIVE, TEST_ADDR);


@@ -479,6 +479,6 @@ public static void waitForThreadTermination(String regex,
    */
   public static void assumeInNativeProfile() {
     Assume.assumeTrue(
-        Boolean.valueOf(System.getProperty("runningWithNative", "false")));
+        Boolean.parseBoolean(System.getProperty("runningWithNative", "false")));
   }
 }
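Boolean.parseBoolean applies the same case-insensitive "true" test as Boolean.valueOf(String); the only difference is that it returns the primitive boolean that Assume.assumeTrue wants, so no Boolean is created and unboxed. For example:

    String prop = System.getProperty("runningWithNative", "false");
    boolean viaWrapper = Boolean.valueOf(prop);       // boxed Boolean, then auto-unboxed
    boolean viaParse = Boolean.parseBoolean(prop);    // primitive boolean directly
    assert viaWrapper == viaParse;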


@@ -330,7 +330,7 @@ void updateHDFSDistCacheFilesList(JobStory jobdesc) throws IOException {
         // Check if visibilities are available because older hadoop versions
         // didn't have public, private Distributed Caches separately.
         boolean visibility =
-            (visibilities == null) ? true : Boolean.valueOf(visibilities[i]);
+            (visibilities == null) || Boolean.parseBoolean(visibilities[i]);
         if (isLocalDistCacheFile(files[i], user, visibility)) {
           // local FS based distributed cache file.
           // Create this file on the pseudo local FS on the fly (i.e. when the
@@ -514,7 +514,7 @@ void configureDistCacheFiles(Configuration conf, JobConf jobConf)
         // Check if visibilities are available because older hadoop versions
         // didn't have public, private Distributed Caches separately.
         boolean visibility =
-            (visibilities == null) ? true : Boolean.valueOf(visibilities[i]);
+            (visibilities == null) || Boolean.parseBoolean(visibilities[i]);
         if (isLocalDistCacheFile(files[i], user, visibility)) {
           // local FS based distributed cache file.
           // Create this file on the pseudo local FS.
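The short-circuit OR is logically identical to the old ternary: when visibilities is null the expression is true and the right-hand side is never evaluated, otherwise it is just the parsed flag. A standalone check (the visibilities array and index here are illustrative):

    String[] visibilities = null;   // e.g. an older Hadoop version supplied no visibilities
    int i = 0;

    boolean viaTernary = (visibilities == null)
        ? true : Boolean.parseBoolean(visibilities[i]);
    boolean viaOr = (visibilities == null) || Boolean.parseBoolean(visibilities[i]);
    assert viaTernary == viaOr;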


@@ -172,7 +172,7 @@ public void verify(ArrayList<JobStory> submitted) throws Exception {
       assertTrue("Gridmix job name is not in the expected format.",
           jobName.equals(GridmixJob.JOB_NAME_PREFIX + jobSeqNum));
       final FileStatus stat = GridmixTestUtils.dfs.getFileStatus(new Path(
-          GridmixTestUtils.DEST, "" + Integer.valueOf(jobSeqNum)));
+          GridmixTestUtils.DEST, "" + Integer.parseInt(jobSeqNum)));
       assertEquals("Wrong owner for " + jobName, spec.getUser(),
           stat.getOwner());
       final int nMaps = spec.getNumberMaps();


@@ -95,7 +95,7 @@ private void validateGetFileStatus(FileSystem pfs, Path path,
       // validate fileSize
       String[] parts = path.toUri().getPath().split("\\.");
-      long expectedFileSize = Long.valueOf(parts[parts.length - 1]);
+      long expectedFileSize = Long.parseLong(parts[parts.length - 1]);
       assertEquals("Invalid file size.", expectedFileSize, stat.getLen());
     } else {
       assertTrue("getFileStatus() did not throw Exception for invalid file "


@@ -45,7 +45,7 @@ public static void main(String[] args) throws IOException, InterruptedException
     int seconds = 5;
     if (args.length >= 1) {
       try {
-        seconds = Integer.valueOf(args[0]);
+        seconds = Integer.parseInt(args[0]);
       } catch (NumberFormatException e) {
         // just use default 5.
       }