HADOOP-15214. Make Hadoop compatible with Guava 21.0.

Contributed by Igor Dvorzhak
Committed by Steve Loughran on 2018-02-08 10:55:54 -08:00
parent 8faf0b50d4
commit 996796f104
3 changed files with 4 additions and 5 deletions


@@ -38,12 +38,12 @@ import java.util.jar.JarInputStream;
 import java.util.jar.Manifest;
 import java.util.regex.Pattern;
-import com.google.common.io.NullOutputStream;
 import org.apache.commons.io.input.TeeInputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.IOUtils.NullOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
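The only change in this file is the NullOutputStream import: the Guava class goes away and Hadoop's own nested class org.apache.hadoop.io.IOUtils.NullOutputStream takes its place, so the code stops depending on a Guava type that this commit's title implies is unavailable (or no longer public) in Guava 21.0. A minimal sketch of the replacement pattern, assuming the Hadoop nested class has a public no-arg constructor (the class and import come from the diff; the surrounding method is purely illustrative):

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.io.IOUtils.NullOutputStream;

public class DiscardSketch {
  // Drain bytes we do not care about into a sink that silently discards them.
  static void drain(byte[] data) throws IOException {
    OutputStream devNull = new NullOutputStream();
    devNull.write(data);   // NullOutputStream drops every write
    devNull.close();
  }
}

Either class behaves the same at runtime; the swap matters only because Hadoop controls its own IOUtils, whereas the Guava class is outside its control.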


@@ -18,7 +18,6 @@
 package org.apache.hadoop.crypto.key.kms.server;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Stopwatch;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -32,6 +31,7 @@ import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
 import org.apache.hadoop.security.token.delegation.web.HttpUserGroupInformation;
+import org.apache.hadoop.util.StopWatch;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -555,7 +555,7 @@ public class KMS {
       throws Exception {
     LOG.trace("Entering reencryptEncryptedKeys method.");
     try {
-      final Stopwatch sw = new Stopwatch().start();
+      final StopWatch sw = new StopWatch().start();
       checkNotEmpty(name, "name");
       checkNotNull(jsonPayload, "jsonPayload");
       final UserGroupInformation user = HttpUserGroupInformation.get();
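The KMS change is the classic Guava 21.0 incompatibility: `new Stopwatch()` no longer compiles, since the public constructors gave way to the Stopwatch.createStarted()/createUnstarted() factories. Rather than follow the Guava API, the commit switches to Hadoop's own org.apache.hadoop.util.StopWatch, which keeps the construct-then-start() idiom shown in the diff. A hedged sketch of the two idioms side by side; the elapsed-time reads (Guava elapsed(TimeUnit), Hadoop now(TimeUnit)) are assumptions about each class's API, not part of this commit:

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;   // Guava
import org.apache.hadoop.util.StopWatch;   // Hadoop

public class TimingSketch {
  static long guavaStyle() {
    // Guava 21.0: obtain a running stopwatch via the factory method.
    Stopwatch sw = Stopwatch.createStarted();
    doWork();
    return sw.elapsed(TimeUnit.MILLISECONDS);   // Guava's elapsed-time read
  }

  static long hadoopStyle() {
    // Hadoop's StopWatch keeps the constructor+start() chain used in the diff above.
    StopWatch sw = new StopWatch().start();
    doWork();
    return sw.now(TimeUnit.MILLISECONDS);       // now(TimeUnit) is assumed here
  }

  static void doWork() { /* stand-in for the timed call */ }
}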


@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.namenode;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Stopwatch;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -672,7 +671,7 @@ public class ReencryptionHandler implements Runnable {
     if (batch.isEmpty()) {
       return new ReencryptionTask(zoneNodeId, 0, batch);
     }
-    final Stopwatch kmsSW = new Stopwatch().start();
+    final StopWatch kmsSW = new StopWatch().start();
     int numFailures = 0;
     String result = "Completed";
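ReencryptionHandler gets the same one-line treatment as KMS.java: drop the Guava import, keep the variable, and construct Hadoop's StopWatch instead. For completeness, a hypothetical sketch of how such a timer is typically stopped and logged once the timed call returns; stop() and now(TimeUnit) are assumed StopWatch methods, and the log message is illustrative rather than taken from the handler:

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.util.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KmsTimingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(KmsTimingSketch.class);

  // Time an arbitrary KMS round trip and log its duration in milliseconds.
  void timeKmsCall(Runnable kmsBatchCall) {
    final StopWatch kmsSW = new StopWatch().start();
    try {
      kmsBatchCall.run();   // stands in for sending the re-encryption batch to the KMS
    } finally {
      kmsSW.stop();
      LOG.info("KMS call took {} ms",
          kmsSW.now(TimeUnit.MILLISECONDS));   // now(TimeUnit) assumed
    }
  }
}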