HBASE-17908 Upgrade guava

Pull in guava 22.0 by using the shaded version up in new hbase-thirdparty project.

In poms, exclude guava everywhere except on hadoop-common. Do this so
we minimize transitive includes. hadoop-common is needed because hadoop
Configuration uses guava doing preconditions.

Everywhere we used guava, instead use shaded so fix a load of imports.

Stopwatch API changed as did hashing and toStringHelper which is now
in MoreObjects class. Otherwise, minimal changes to come up on 22.0
This commit is contained in:
Michael Stack 2017-07-06 22:43:46 -07:00
parent 7941b83aaf
commit 890d92a90c
591 changed files with 1414 additions and 1021 deletions

View File

@ -209,6 +209,12 @@
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId> <artifactId>hbase-hadoop-compat</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>

View File

@ -148,8 +148,8 @@
<artifactId>commons-logging</artifactId> <artifactId>commons-logging</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>guava</artifactId> <artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.protobuf</groupId> <groupId>com.google.protobuf</groupId>
@ -205,6 +205,17 @@
<groupId>org.apache.curator</groupId> <groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId> <artifactId>curator-client</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-crypto</artifactId>
<version>${commons-crypto.version}</version>
<exclusions>
<exclusion>
<groupId>net.java.dev.jna</groupId>
<artifactId>jna</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies> </dependencies>
<profiles> <profiles>

View File

@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.hadoop.hbase.util.PairOfSameType;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.Nullable;

View File

@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR;
import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey; import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey;
import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY; import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import io.netty.util.HashedWheelTimer; import io.netty.util.HashedWheelTimer;

View File

@ -26,6 +26,12 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.function.Function; import java.util.function.Function;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import io.netty.util.Timeout;
import io.netty.util.TimerTask;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;

View File

@ -20,7 +20,7 @@
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import static com.google.common.base.Preconditions.checkArgument; import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull;
import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts; import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
import io.netty.util.HashedWheelTimer; import io.netty.util.HashedWheelTimer;
@ -499,4 +499,4 @@ class AsyncRpcRetryingCallerFactory {
public <T> ServerRequestCallerBuilder<T> serverRequest() { public <T> ServerRequestCallerBuilder<T> serverRequest() {
return new ServerRequestCallerBuilder<>(); return new ServerRequestCallerBuilder<>();
} }
} }

View File

@ -28,7 +28,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
import static org.apache.hadoop.hbase.client.ConnectionUtils.updateResultsMetrics; import static org.apache.hadoop.hbase.client.ConnectionUtils.updateResultsMetrics;
import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMetrics; import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMetrics;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import io.netty.util.HashedWheelTimer; import io.netty.util.HashedWheelTimer;
import io.netty.util.Timeout; import io.netty.util.Timeout;

View File

@ -21,7 +21,7 @@ import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hbase.client.ConnectionUtils.allOf; import static org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
import static org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly; import static org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import java.util.List; import java.util.List;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;

View File

@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize; import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -15,7 +15,7 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize; import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
import java.io.IOException; import java.io.IOException;

View File

@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
import static org.apache.hadoop.hbase.client.ConnectionUtils.createScanResultCache; import static org.apache.hadoop.hbase.client.ConnectionUtils.createScanResultCache;
import static org.apache.hadoop.hbase.client.ConnectionUtils.incRegionCountMetrics; import static org.apache.hadoop.hbase.client.ConnectionUtils.incRegionCountMetrics;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;

View File

@ -15,7 +15,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* Configuration parameters for the connection. * Configuration parameters for the connection.

View File

@ -122,7 +122,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.Nullable;

View File

@ -21,8 +21,8 @@ import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW; import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW; import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException; import java.lang.reflect.UndeclaredThrowableException;

View File

@ -204,7 +204,7 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors;
import com.google.protobuf.Message; import com.google.protobuf.Message;
import com.google.protobuf.RpcController; import com.google.protobuf.RpcController;

View File

@ -22,7 +22,7 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies; import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
// DO NOT MAKE USE OF THESE IMPORTS! THEY ARE HERE FOR COPROCESSOR ENDPOINTS ONLY. // DO NOT MAKE USE OF THESE IMPORTS! THEY ARE HERE FOR COPROCESSOR ENDPOINTS ONLY.
// Internally, we use shaded protobuf. This below are part of our public API. // Internally, we use shaded protobuf. This below are part of our public API.
import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors;
@ -1333,4 +1333,4 @@ public class HTable implements Table {
} }
return mutator; return mutator;
} }
} }

View File

@ -19,8 +19,8 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.AbstractMap.SimpleEntry; import java.util.AbstractMap.SimpleEntry;

View File

@ -26,7 +26,7 @@ import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.RatioGauge; import com.codahale.metrics.RatioGauge;
import com.codahale.metrics.Timer; import com.codahale.metrics.Timer;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;

View File

@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActi
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* Callable that handles the <code>multi</code> method call going against a single * Callable that handles the <code>multi</code> method call going against a single

View File

@ -50,11 +50,11 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
import com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import com.google.common.io.ByteArrayDataInput; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
@InterfaceAudience.Public @InterfaceAudience.Public
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable, public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.util.Map.Entry; import java.util.Map.Entry;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Public @InterfaceAudience.Public

View File

@ -524,4 +524,4 @@ class RawAsyncTableImpl implements RawAsyncTable {
(loc, error) -> onLocateComplete(stubMaker, callable, callback, locs, nonNullEndKey, (loc, error) -> onLocateComplete(stubMaker, callable, callback, locs, nonNullEndKey,
endKeyInclusive, new AtomicBoolean(false), new AtomicInteger(0), loc, error)); endKeyInclusive, new AtomicBoolean(false), new AtomicInteger(0), loc, error));
} }
} }

View File

@ -100,4 +100,4 @@ class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
public byte[] getLastRegion() { public byte[] getLastRegion() {
return lastRegion; return lastRegion;
} }
} }

View File

@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Objects; import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
import com.google.protobuf.Descriptors.MethodDescriptor; import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message; import com.google.protobuf.Message;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
/** /**

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;

View File

@ -18,7 +18,7 @@
*/ */
package org.apache.hadoop.hbase.client; package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* Simple exponential backoff policy on for the client that uses a percent^4 times the * Simple exponential backoff policy on for the client that uses a percent^4 times the

View File

@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
/** /**
* Provides server side metrics related to scan operations. * Provides server side metrics related to scan operations.

View File

@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/** /**
* <p> * <p>

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.Strings;
import com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/** /**

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;

View File

@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;

View File

@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* This filter is used for selecting only those keys with columns that are * This filter is used for selecting only those keys with columns that are

View File

@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* This is a generic filter to be used to filter by comparison. It takes an * This is a generic filter to be used to filter by comparison. It takes an
* operator (equal, greater, not equal, etc) and a byte [] comparator. * operator (equal, greater, not equal, etc) and a byte [] comparator.

View File

@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;

View File

@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/** /**

View File

@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAccess;
import org.apache.hadoop.hbase.util.UnsafeAvailChecker; import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key. * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key.

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* A Filter that stops after the given row. There is no "RowStopFilter" because * A Filter that stops after the given row. There is no "RowStopFilter" because

View File

@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/** /**

View File

@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/** /**
* Implementation of Filter interface that limits results to a specific page * Implementation of Filter interface that limits results to a specific page

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp}

View File

@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
/** /**

View File

@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.ipc;
import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE;
import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException; import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache; import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.ipc; package org.apache.hadoop.hbase.ipc;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.net.SocketAddress; import java.net.SocketAddress;

View File

@ -243,4 +243,4 @@ public final class CoprocessorRpcUtils {
} }
return new DoNotRetryIOException(controller.errorText()); return new DoNotRetryIOException(controller.errorText());
} }
} }

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.ipc; package org.apache.hadoop.hbase.ipc;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.ipc; package org.apache.hadoop.hbase.ipc;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import io.netty.channel.Channel; import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup; import io.netty.channel.EventLoopGroup;

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.hbase.ipc; package org.apache.hadoop.hbase.ipc;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import java.net.SocketAddress; import java.net.SocketAddress;

View File

@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.replication;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import com.google.common.collect.ImmutableSet; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;

View File

@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;

View File

@ -18,7 +18,7 @@
*/ */
package org.apache.hadoop.hbase.replication; package org.apache.hadoop.hbase.replication;
import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.security; package org.apache.hadoop.hbase.security;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;

View File

@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessCont
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import com.google.protobuf.ByteString; import com.google.protobuf.ByteString;
import com.google.protobuf.RpcController; import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException; import com.google.protobuf.ServiceException;
@ -766,4 +766,4 @@ public class AccessControlUtil {
.setPermission(ret) .setPermission(ret)
).build(); ).build();
} }
} }

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.VersionedWritable; import org.apache.hadoop.io.VersionedWritable;
import com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
/** /**
* Base permissions instance representing the ability to perform a given set * Base permissions instance representing the ability to perform a given set

View File

@ -19,8 +19,8 @@
package org.apache.hadoop.hbase.security.access; package org.apache.hadoop.hbase.security.access;
import com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.security.access.Permission.Action;

View File

@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.zookeeper;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource; import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSourceImpl; import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSourceImpl;

View File

@ -24,7 +24,7 @@ import static org.apache.hadoop.hbase.HConstants.SPLIT_LOGDIR_NAME;
import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT; import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT;
import static org.apache.hadoop.hbase.HRegionInfo.DEFAULT_REPLICA_ID; import static org.apache.hadoop.hbase.HRegionInfo.DEFAULT_REPLICA_ID;
import com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
import java.util.Optional; import java.util.Optional;
import java.util.stream.IntStream; import java.util.stream.IntStream;

View File

@ -286,7 +286,7 @@ public class TestInterfaceAudienceAnnotations {
* Checks whether all the classes in client and common modules contain * Checks whether all the classes in client and common modules contain
* {@link InterfaceAudience} annotations. * {@link InterfaceAudience} annotations.
*/ */
@Test @Ignore @Test
public void testInterfaceAudienceAnnotation() public void testInterfaceAudienceAnnotation()
throws ClassNotFoundException, IOException, LinkageError { throws ClassNotFoundException, IOException, LinkageError {
@ -327,7 +327,7 @@ public class TestInterfaceAudienceAnnotations {
* Checks whether all the classes in client and common modules that are marked * Checks whether all the classes in client and common modules that are marked
* InterfaceAudience.Public do not have {@link InterfaceStability} annotations. * InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
*/ */
@Test @Ignore @Test
public void testNoInterfaceStabilityAnnotationForPublicAPI() public void testNoInterfaceStabilityAnnotationForPublicAPI()
throws ClassNotFoundException, IOException, LinkageError { throws ClassNotFoundException, IOException, LinkageError {
@ -411,7 +411,7 @@ public class TestInterfaceAudienceAnnotations {
0, classes.size()); 0, classes.size());
} }
@Test @Ignore @Test
public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError { public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses(); Set<Class<?>> classes = findPublicClasses();
List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>(); List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>();
@ -443,7 +443,7 @@ public class TestInterfaceAudienceAnnotations {
return classes; return classes;
} }
@Test @Ignore @Test
public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError { public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses(); Set<Class<?>> classes = findPublicClasses();
List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>(); List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>();
@ -463,7 +463,7 @@ public class TestInterfaceAudienceAnnotations {
protosParamType.size()); protosParamType.size());
} }
@Test @Ignore @Test
public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError { public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses(); Set<Class<?>> classes = findPublicClasses();
List<Class<?>> classList = new ArrayList<>(); List<Class<?>> classList = new ArrayList<>();

View File

@ -88,7 +88,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.mockito.Mockito; import org.mockito.Mockito;
import com.google.common.base.Stopwatch; import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
@ -722,14 +722,13 @@ public class TestClientNoCluster extends Configured implements Tool {
TableName tableName = TableName.valueOf(BIG_USER_TABLE); TableName tableName = TableName.valueOf(BIG_USER_TABLE);
if (get) { if (get) {
try (Table table = sharedConnection.getTable(tableName)){ try (Table table = sharedConnection.getTable(tableName)){
Stopwatch stopWatch = new Stopwatch(); Stopwatch stopWatch = Stopwatch.createStarted();
stopWatch.start();
for (int i = 0; i < namespaceSpan; i++) { for (int i = 0; i < namespaceSpan; i++) {
byte [] b = format(rd.nextLong()); byte [] b = format(rd.nextLong());
Get g = new Get(b); Get g = new Get(b);
table.get(g); table.get(g);
if (i % printInterval == 0) { if (i % printInterval == 0) {
LOG.info("Get " + printInterval + "/" + stopWatch.elapsedMillis()); LOG.info("Get " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS));
stopWatch.reset(); stopWatch.reset();
stopWatch.start(); stopWatch.start();
} }
@ -739,15 +738,14 @@ public class TestClientNoCluster extends Configured implements Tool {
} }
} else { } else {
try (BufferedMutator mutator = sharedConnection.getBufferedMutator(tableName)) { try (BufferedMutator mutator = sharedConnection.getBufferedMutator(tableName)) {
Stopwatch stopWatch = new Stopwatch(); Stopwatch stopWatch = Stopwatch.createStarted();
stopWatch.start();
for (int i = 0; i < namespaceSpan; i++) { for (int i = 0; i < namespaceSpan; i++) {
byte [] b = format(rd.nextLong()); byte [] b = format(rd.nextLong());
Put p = new Put(b); Put p = new Put(b);
p.addColumn(HConstants.CATALOG_FAMILY, b, b); p.addColumn(HConstants.CATALOG_FAMILY, b, b);
mutator.mutate(p); mutator.mutate(p);
if (i % printInterval == 0) { if (i % printInterval == 0) {
LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis()); LOG.info("Put " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS));
stopWatch.reset(); stopWatch.reset();
stopWatch.start(); stopWatch.start();
} }

View File

@ -27,7 +27,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import com.google.common.base.Strings; import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@ -219,10 +219,9 @@
<type>test-jar</type> <type>test-jar</type>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- General dependencies -->
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>guava</artifactId> <artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-logging</groupId> <groupId>commons-logging</groupId>

View File

@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* This Cell is an implementation of {@link ByteBufferCell} where the data resides in * This Cell is an implementation of {@link ByteBufferCell} where the data resides in

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.primitives.Longs; import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
/** /**
* Compare two HBase cells. Do not use this method comparing <code>-ROOT-</code> or * Compare two HBase cells. Do not use this method comparing <code>-ROOT-</code> or

View File

@ -27,7 +27,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer; import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;

View File

@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.RawComparator;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* An HBase Key/Value. This is the fundamental HBase Type. * An HBase Key/Value. This is the fundamental HBase Type.

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IterableUtils; import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.Strings;
import com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Private @InterfaceAudience.Private
public class KeyValueTestUtil { public class KeyValueTestUtil {

View File

@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
import com.google.common.base.Function; import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
import com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/** /**
* static convenience methods for dealing with KeyValues and collections of KeyValues * static convenience methods for dealing with KeyValues and collections of KeyValues

View File

@ -25,7 +25,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once * ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.net.InetAddresses; import org.apache.hadoop.hbase.shaded.com.google.common.net.InetAddresses;
/** /**

View File

@ -141,7 +141,8 @@ public final class TableName implements Comparable<TableName> {
throw new IllegalArgumentException("Name is null or empty"); throw new IllegalArgumentException("Name is null or empty");
} }
int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(tableName, int namespaceDelimIndex =
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(tableName,
(byte) NAMESPACE_DELIM); (byte) NAMESPACE_DELIM);
if (namespaceDelimIndex < 0){ if (namespaceDelimIndex < 0){
isLegalTableQualifierName(tableName); isLegalTableQualifierName(tableName);
@ -435,7 +436,8 @@ public final class TableName implements Comparable<TableName> {
} }
} }
int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(fullName, int namespaceDelimIndex =
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(fullName,
(byte) NAMESPACE_DELIM); (byte) NAMESPACE_DELIM);
if (namespaceDelimIndex < 0) { if (namespaceDelimIndex < 0) {

View File

@ -22,7 +22,7 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;

View File

@ -17,8 +17,8 @@
package org.apache.hadoop.hbase.io; package org.apache.hadoop.hbase.io;
import static com.google.common.base.Preconditions.checkArgument; import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull;
import java.io.FilterInputStream; import java.io.FilterInputStream;
import java.io.IOException; import java.io.IOException;

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.MD5Hash; import org.apache.hadoop.hbase.util.MD5Hash;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* Crypto context. Encapsulates an encryption algorithm and its key material. * Crypto context. Encapsulates an encryption algorithm and its key material.

View File

@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Context;
import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* AES-128, provided by the JCE * AES-128, provided by the JCE

View File

@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Decryptor;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Evolving @InterfaceStability.Evolving

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Evolving @InterfaceStability.Evolving

View File

@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.io.crypto.Context;
import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Evolving @InterfaceStability.Evolving

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.io.crypto.aes; package org.apache.hadoop.hbase.io.crypto.aes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.commons.crypto.stream.CryptoInputStream; import org.apache.commons.crypto.stream.CryptoInputStream;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.io.crypto.aes; package org.apache.hadoop.hbase.io.crypto.aes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.commons.crypto.stream.CryptoOutputStream; import org.apache.commons.crypto.stream.CryptoOutputStream;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;

View File

@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Compressor;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* Encapsulates a data block compressed using a particular encoding algorithm. * Encapsulates a data block compressed using a particular encoding algorithm.

View File

@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Compressor;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* A default implementation of {@link HFileBlockEncodingContext}. It will * A default implementation of {@link HFileBlockEncodingContext}. It will

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.io.hadoopbackport; package org.apache.hadoop.hbase.io.hadoopbackport;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InterruptedIOException; import java.io.InterruptedIOException;

View File

@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/** /**
* WALDictionary using an LRU eviction algorithm. Uses a linked list running * WALDictionary using an LRU eviction algorithm. Uses a linked list running

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/* /*
* It seems like as soon as somebody sets himself to the task of creating VInt encoding, his mind * It seems like as soon as somebody sets himself to the task of creating VInt encoding, his mind

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.net;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.google.common.net.HostAndPort; import org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort;
/** /**
* An immutable type to hold a hostname and port combo, like an Endpoint * An immutable type to hold a hostname and port combo, like an Endpoint
@ -46,7 +46,7 @@ public class Address implements Comparable<Address> {
} }
public String getHostname() { public String getHostname() {
return this.hostAndPort.getHostText(); return this.hostAndPort.getHost();
} }
public int getPort() { public int getPort() {

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ObjectIntPair; import org.apache.hadoop.hbase.util.ObjectIntPair;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* Provides a unified view of all the underlying ByteBuffers and will look as if a bigger * Provides a unified view of all the underlying ByteBuffers and will look as if a bigger

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hbase.nio; package org.apache.hadoop.hbase.nio;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;

View File

@ -28,7 +28,7 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import com.google.common.cache.LoadingCache; import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.hbase.util.Methods;

View File

@ -24,13 +24,13 @@ import java.util.concurrent.Callable;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache; import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ListenableFuture; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.BaseConfigurable; import org.apache.hadoop.hbase.BaseConfigurable;

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;
import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/** /**
* Extends the basic {@link SimpleByteRange} implementation with position * Extends the basic {@link SimpleByteRange} implementation with position

Some files were not shown because too many files have changed in this diff Show More