HADOOP-11600. Fix up source codes to be compiled with Guava 17.0. (ozawa)

Tsuyoshi Ozawa 2015-02-17 21:56:20 +09:00
parent cf4b7f506d
commit 2f0f756b26
3 changed files with 11 additions and 3 deletions

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -605,6 +605,8 @@ Release 2.7.0 - UNRELEASED
     HADOOP-11589. NetUtils.createSocketAddr should trim the input URI.
     (Rakesh R via ozawa)
 
+    HADOOP-11600. Fix up source codes to be compiled with Guava 17.0. (ozawa)
+
   OPTIMIZATIONS
 
   HADOOP-11323. WritableComparator#compare keeps reference to byte array.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.shell;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.LinkedList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -77,9 +78,14 @@ class XAttrCommands extends FsCommand {
     name = StringUtils.popOptionWithArgument("-n", args);
     String en = StringUtils.popOptionWithArgument("-e", args);
     if (en != null) {
-      encoding = enValueOfFunc.apply(en.toUpperCase());
+      try {
+        encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
+      } catch (IllegalArgumentException e) {
+        throw new IllegalArgumentException(
+            "Invalid/unsupported encoding option specified: " + en);
+      }
       Preconditions.checkArgument(encoding != null,
         "Invalid/unsupported encoding option specified: " + en);
     }
 
     boolean r = StringUtils.popOption("-R", args);
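The try/catch is the substance of the compatibility fix here: the hunk redefining enValueOfFunc is not shown, but the catch suggests the lookup now throws IllegalArgumentException for an unknown name (as Guava's Enums.stringConverter, the successor to the removed Enums.valueOfFunction, does) rather than returning null, which is what the old Preconditions null check relied on. The switch to toUpperCase(Locale.ENGLISH) pins the case mapping so option parsing does not vary with the JVM's default locale (the Turkish dotless-i problem). A minimal, dependency-free sketch of the same pattern follows; Codec and parseCodec are illustrative names, and plain Enum.valueOf stands in for the Guava converter, which fails the same way on unknown constants:

import java.util.Locale;

public class EnumOptionParser {
  enum Codec { TEXT, HEX, BASE64 }  // stand-in for XAttrCodec

  // Parse a user-supplied codec name. toUpperCase(Locale.ENGLISH) keeps the
  // mapping stable across locales, and the catch rewraps the terse valueOf
  // failure with a message that names the offending input.
  static Codec parseCodec(String en) {
    try {
      return Codec.valueOf(en.toUpperCase(Locale.ENGLISH));
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException(
          "Invalid/unsupported encoding option specified: " + en);
    }
  }

  public static void main(String[] args) {
    System.out.println(parseCodec("base64")); // prints BASE64
    parseCodec("bogus"); // throws: Invalid/unsupported encoding option ...
  }
}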

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import com.google.common.io.LimitInputStream;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -33,6 +32,7 @@ import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
 import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.DB;
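For context on this import swap: Guava deprecated com.google.common.io.LimitInputStream in 14.0 and removed it in 15.0, so it no longer exists in Guava 17.0; Hadoop instead carries its own copy of the class under org.apache.hadoop.util. The offline image viewer uses it to cap each fsimage section reader at that section's byte length. A small usage sketch, assuming hadoop-common on the classpath; the two 4-byte "sections" are made-up data, not the real fsimage layout:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.util.LimitInputStream;

public class SectionReadDemo {
  public static void main(String[] args) throws IOException {
    // Two 4-byte "sections" back to back in one stream.
    InputStream in =
        new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});

    // Cap the first reader at 4 bytes: it sees EOF at the section
    // boundary instead of consuming the next section's bytes.
    DataInputStream s1 = new DataInputStream(new LimitInputStream(in, 4));
    System.out.println(s1.readInt()); // 16909060 (0x01020304)
    System.out.println(s1.read());    // -1: limit reached

    // The underlying stream is still positioned at the second section.
    DataInputStream s2 = new DataInputStream(new LimitInputStream(in, 4));
    System.out.println(s2.readInt()); // 84281096 (0x05060708)
  }
}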