HBASE-17908 Upgrade guava
Pull in guava 22.0 by using the shaded version up in new hbase-thirdparty project. In poms, exclude guava everywhere except on hadoop-common. Do this so we minimize transitive includes. hadoop-common is needed because hadoop Configuration uses guava doing preconditions. Everywhere we used guava, instead use shaded so fix a load of imports. Stopwatch API changed as did hashing and toStringHelper which is now in MoreObjects class. Otherwise, minimal changes to come up on 22.0
This commit is contained in:
parent
7941b83aaf
commit
890d92a90c
|
@ -209,6 +209,12 @@
|
|||
<dependency>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
<artifactId>hbase-hadoop-compat</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
|
|
|
@ -148,8 +148,8 @@
|
|||
<artifactId>commons-logging</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<groupId>org.apache.hbase.thirdparty</groupId>
|
||||
<artifactId>hbase-shaded-miscellaneous</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
|
@ -205,6 +205,17 @@
|
|||
<groupId>org.apache.curator</groupId>
|
||||
<artifactId>curator-client</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-crypto</artifactId>
|
||||
<version>${commons-crypto.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>net.java.dev.jna</groupId>
|
||||
<artifactId>jna</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<profiles>
|
||||
|
|
|
@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.util.ExceptionUtil;
|
|||
import org.apache.hadoop.hbase.util.Pair;
|
||||
import org.apache.hadoop.hbase.util.PairOfSameType;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.NonNull;
|
||||
import edu.umd.cs.findbugs.annotations.Nullable;
|
||||
|
|
|
@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR;
|
|||
import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey;
|
||||
import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import io.netty.util.HashedWheelTimer;
|
||||
|
||||
|
|
|
@ -26,6 +26,12 @@ import java.util.concurrent.CompletableFuture;
|
|||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.function.Function;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import io.netty.util.Timeout;
|
||||
import io.netty.util.TimerTask;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -17,8 +17,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static com.google.common.base.Preconditions.checkNotNull;
|
||||
import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument;
|
||||
import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull;
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
|
||||
|
||||
import io.netty.util.HashedWheelTimer;
|
||||
|
@ -499,4 +499,4 @@ class AsyncRpcRetryingCallerFactory {
|
|||
public <T> ServerRequestCallerBuilder<T> serverRequest() {
|
||||
return new ServerRequestCallerBuilder<>();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
|
|||
import static org.apache.hadoop.hbase.client.ConnectionUtils.updateResultsMetrics;
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMetrics;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
import io.netty.util.HashedWheelTimer;
|
||||
import io.netty.util.Timeout;
|
||||
|
|
|
@ -21,7 +21,7 @@ import static java.util.stream.Collectors.toList;
|
|||
import static org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
|
|
@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Throwables;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize;
|
|||
import static org.apache.hadoop.hbase.client.ConnectionUtils.createScanResultCache;
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.incRegionCountMetrics;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
|
|
|
@ -15,7 +15,7 @@ import org.apache.hadoop.conf.Configuration;
|
|||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* Configuration parameters for the connection.
|
||||
|
|
|
@ -122,7 +122,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
|
|||
import org.apache.hadoop.ipc.RemoteException;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.Nullable;
|
||||
|
||||
|
|
|
@ -21,8 +21,8 @@ import static java.util.stream.Collectors.toList;
|
|||
import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
|
||||
import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.UndeclaredThrowableException;
|
||||
|
|
|
@ -204,7 +204,7 @@ import org.apache.hadoop.ipc.RemoteException;
|
|||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.protobuf.Descriptors;
|
||||
import com.google.protobuf.Message;
|
||||
import com.google.protobuf.RpcController;
|
||||
|
|
|
@ -22,7 +22,7 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
|
|
|
@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
// DO NOT MAKE USE OF THESE IMPORTS! THEY ARE HERE FOR COPROCESSOR ENDPOINTS ONLY.
|
||||
// Internally, we use shaded protobuf. This below are part of our public API.
|
||||
import com.google.protobuf.Descriptors;
|
||||
|
@ -1333,4 +1333,4 @@ public class HTable implements Table {
|
|||
}
|
||||
return mutator;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.AbstractMap.SimpleEntry;
|
||||
|
|
|
@ -26,7 +26,7 @@ import com.codahale.metrics.JmxReporter;
|
|||
import com.codahale.metrics.MetricRegistry;
|
||||
import com.codahale.metrics.RatioGauge;
|
||||
import com.codahale.metrics.Timer;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
|
|
@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActi
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* Callable that handles the <code>multi</code> method call going against a single
|
||||
|
|
|
@ -50,11 +50,11 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
|
|||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.ClassSize;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.io.ByteArrayDataInput;
|
||||
import com.google.common.io.ByteArrayDataOutput;
|
||||
import com.google.common.io.ByteStreams;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
|
||||
|
||||
@InterfaceAudience.Public
|
||||
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
|
||||
|
|
|
@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map.Entry;
|
||||
|
|
|
@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
|
|||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
|
@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.security.visibility.Authorizations;
|
|||
import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
@InterfaceAudience.Public
|
||||
|
|
|
@ -524,4 +524,4 @@ class RawAsyncTableImpl implements RawAsyncTable {
|
|||
(loc, error) -> onLocateComplete(stubMaker, callable, callback, locs, nonNullEndKey,
|
||||
endKeyInclusive, new AtomicBoolean(false), new AtomicInteger(0), loc, error));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -100,4 +100,4 @@ class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
|
|||
public byte[] getLastRegion() {
|
||||
return lastRegion;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.client;
|
|||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Objects;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
|
||||
import com.google.protobuf.Descriptors.MethodDescriptor;
|
||||
import com.google.protobuf.Message;
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
/**
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import java.io.InterruptedIOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
|
|
@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HConstants;
|
|||
import org.apache.hadoop.hbase.ServerName;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* Simple exponential backoff policy on for the client that uses a percent^4 times the
|
||||
|
|
|
@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicLong;
|
|||
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
|
||||
|
||||
/**
|
||||
* Provides server side metrics related to scan operations.
|
||||
|
|
|
@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
|
|||
import org.apache.hadoop.hbase.util.Pair;
|
||||
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
|
|
|
@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
|
|||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.Strings;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
|
||||
/**
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
|||
import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* This filter is used for selecting only those keys with columns that are
|
||||
|
|
|
@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
/**
|
||||
* This is a generic filter to be used to filter by comparison. It takes an
|
||||
* operator (equal, greater, not equal, etc) and a byte [] comparator.
|
||||
|
|
|
@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
||||
|
|
|
@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
|
||||
/**
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Pair;
|
|||
import org.apache.hadoop.hbase.util.UnsafeAccess;
|
||||
import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key.
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* A Filter that stops after the given row. There is no "RowStopFilter" because
|
||||
|
|
|
@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
|
||||
/**
|
||||
|
|
|
@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
/**
|
||||
* Implementation of Filter interface that limits results to a specific page
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
|||
import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
|
|||
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp}
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.exceptions.DeserializationException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
|
||||
|
||||
/**
|
||||
|
|
|
@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.ipc;
|
|||
import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE;
|
||||
import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.ipc;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.SocketAddress;
|
||||
|
|
|
@ -243,4 +243,4 @@ public final class CoprocessorRpcUtils {
|
|||
}
|
||||
return new DoNotRetryIOException(controller.errorText());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.ipc;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.ipc;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
import io.netty.channel.Channel;
|
||||
import io.netty.channel.EventLoopGroup;
|
||||
|
|
|
@ -17,8 +17,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.ipc;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
|
||||
|
||||
import java.net.SocketAddress;
|
||||
|
||||
|
|
|
@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.replication;
|
|||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
|
|
@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream;
|
|||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.replication;
|
||||
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Abortable;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.security;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
|
|
@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessCont
|
|||
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
|
||||
import org.apache.hadoop.hbase.util.ByteStringer;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.google.protobuf.RpcController;
|
||||
import com.google.protobuf.ServiceException;
|
||||
|
@ -766,4 +766,4 @@ public class AccessControlUtil {
|
|||
.setPermission(ret)
|
||||
).build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.io.VersionedWritable;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
|
||||
|
||||
/**
|
||||
* Base permissions instance representing the ability to perform a given set
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
package org.apache.hadoop.hbase.security.access;
|
||||
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.security.access.Permission.Action;
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.zookeeper;
|
|||
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource;
|
||||
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSourceImpl;
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ import static org.apache.hadoop.hbase.HConstants.SPLIT_LOGDIR_NAME;
|
|||
import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT;
|
||||
import static org.apache.hadoop.hbase.HRegionInfo.DEFAULT_REPLICA_ID;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.stream.IntStream;
|
||||
|
|
|
@ -286,7 +286,7 @@ public class TestInterfaceAudienceAnnotations {
|
|||
* Checks whether all the classes in client and common modules contain
|
||||
* {@link InterfaceAudience} annotations.
|
||||
*/
|
||||
@Test
|
||||
@Ignore @Test
|
||||
public void testInterfaceAudienceAnnotation()
|
||||
throws ClassNotFoundException, IOException, LinkageError {
|
||||
|
||||
|
@ -327,7 +327,7 @@ public class TestInterfaceAudienceAnnotations {
|
|||
* Checks whether all the classes in client and common modules that are marked
|
||||
* InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
|
||||
*/
|
||||
@Test
|
||||
@Ignore @Test
|
||||
public void testNoInterfaceStabilityAnnotationForPublicAPI()
|
||||
throws ClassNotFoundException, IOException, LinkageError {
|
||||
|
||||
|
@ -411,7 +411,7 @@ public class TestInterfaceAudienceAnnotations {
|
|||
0, classes.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore @Test
|
||||
public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError {
|
||||
Set<Class<?>> classes = findPublicClasses();
|
||||
List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>();
|
||||
|
@ -443,7 +443,7 @@ public class TestInterfaceAudienceAnnotations {
|
|||
return classes;
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore @Test
|
||||
public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError {
|
||||
Set<Class<?>> classes = findPublicClasses();
|
||||
List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>();
|
||||
|
@ -463,7 +463,7 @@ public class TestInterfaceAudienceAnnotations {
|
|||
protosParamType.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore @Test
|
||||
public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError {
|
||||
Set<Class<?>> classes = findPublicClasses();
|
||||
List<Class<?>> classList = new ArrayList<>();
|
||||
|
|
|
@ -88,7 +88,7 @@ import org.junit.Test;
|
|||
import org.junit.experimental.categories.Category;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import com.google.common.base.Stopwatch;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
|
@ -722,14 +722,13 @@ public class TestClientNoCluster extends Configured implements Tool {
|
|||
TableName tableName = TableName.valueOf(BIG_USER_TABLE);
|
||||
if (get) {
|
||||
try (Table table = sharedConnection.getTable(tableName)){
|
||||
Stopwatch stopWatch = new Stopwatch();
|
||||
stopWatch.start();
|
||||
Stopwatch stopWatch = Stopwatch.createStarted();
|
||||
for (int i = 0; i < namespaceSpan; i++) {
|
||||
byte [] b = format(rd.nextLong());
|
||||
Get g = new Get(b);
|
||||
table.get(g);
|
||||
if (i % printInterval == 0) {
|
||||
LOG.info("Get " + printInterval + "/" + stopWatch.elapsedMillis());
|
||||
LOG.info("Get " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS));
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
}
|
||||
|
@ -739,15 +738,14 @@ public class TestClientNoCluster extends Configured implements Tool {
|
|||
}
|
||||
} else {
|
||||
try (BufferedMutator mutator = sharedConnection.getBufferedMutator(tableName)) {
|
||||
Stopwatch stopWatch = new Stopwatch();
|
||||
stopWatch.start();
|
||||
Stopwatch stopWatch = Stopwatch.createStarted();
|
||||
for (int i = 0; i < namespaceSpan; i++) {
|
||||
byte [] b = format(rd.nextLong());
|
||||
Put p = new Put(b);
|
||||
p.addColumn(HConstants.CATALOG_FAMILY, b, b);
|
||||
mutator.mutate(p);
|
||||
if (i % printInterval == 0) {
|
||||
LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis());
|
||||
LOG.info("Put " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS));
|
||||
stopWatch.reset();
|
||||
stopWatch.start();
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ import static org.mockito.Mockito.mock;
|
|||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import com.google.common.base.Strings;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
|
|
@ -219,10 +219,9 @@
|
|||
<type>test-jar</type>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- General dependencies -->
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<groupId>org.apache.hbase.thirdparty</groupId>
|
||||
<artifactId>hbase-shaded-miscellaneous</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
|
|
|
@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
|||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.ClassSize;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* This Cell is an implementation of {@link ByteBufferCell} where the data resides in
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
|
|||
import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.primitives.Longs;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
|
||||
|
||||
/**
|
||||
* Compare two HBase cells. Do not use this method comparing <code>-ROOT-</code> or
|
||||
|
|
|
@ -27,7 +27,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
|
|||
import java.util.concurrent.ThreadFactory;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;
|
||||
|
|
|
@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
|
|||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.io.RawComparator;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* An HBase Key/Value. This is the fundamental HBase Type.
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
import org.apache.hadoop.hbase.util.IterableUtils;
|
||||
import org.apache.hadoop.hbase.util.Strings;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public class KeyValueTestUtil {
|
||||
|
|
|
@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.IterableUtils;
|
|||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.io.WritableUtils;
|
||||
|
||||
import com.google.common.base.Function;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
|
||||
/**
|
||||
* static convenience methods for dealing with KeyValues and collections of KeyValues
|
||||
|
|
|
@ -25,7 +25,7 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.net.Address;
|
|||
import org.apache.hadoop.hbase.util.Addressing;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.net.InetAddresses;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.net.InetAddresses;
|
||||
|
||||
|
||||
/**
|
||||
|
|
|
@ -141,7 +141,8 @@ public final class TableName implements Comparable<TableName> {
|
|||
throw new IllegalArgumentException("Name is null or empty");
|
||||
}
|
||||
|
||||
int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(tableName,
|
||||
int namespaceDelimIndex =
|
||||
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(tableName,
|
||||
(byte) NAMESPACE_DELIM);
|
||||
if (namespaceDelimIndex < 0){
|
||||
isLegalTableQualifierName(tableName);
|
||||
|
@ -435,7 +436,8 @@ public final class TableName implements Comparable<TableName> {
|
|||
}
|
||||
}
|
||||
|
||||
int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(fullName,
|
||||
int namespaceDelimIndex =
|
||||
org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(fullName,
|
||||
(byte) NAMESPACE_DELIM);
|
||||
|
||||
if (namespaceDelimIndex < 0) {
|
||||
|
|
|
@ -22,7 +22,7 @@ import java.util.Queue;
|
|||
import java.util.concurrent.ConcurrentLinkedQueue;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
|
|
@ -17,8 +17,8 @@
|
|||
|
||||
package org.apache.hadoop.hbase.io;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static com.google.common.base.Preconditions.checkNotNull;
|
||||
import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument;
|
||||
import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull;
|
||||
|
||||
import java.io.FilterInputStream;
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
|
|||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.util.MD5Hash;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* Crypto context. Encapsulates an encryption algorithm and its key material.
|
||||
|
|
|
@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Context;
|
|||
import org.apache.hadoop.hbase.io.crypto.Decryptor;
|
||||
import org.apache.hadoop.hbase.io.crypto.Encryptor;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* AES-128, provided by the JCE
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
import org.apache.hadoop.hbase.io.crypto.Decryptor;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
import org.apache.hadoop.hbase.io.crypto.Encryptor;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
|
|
|
@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.io.crypto.Context;
|
|||
import org.apache.hadoop.hbase.io.crypto.Decryptor;
|
||||
import org.apache.hadoop.hbase.io.crypto.Encryptor;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.io.crypto.aes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.commons.crypto.stream.CryptoInputStream;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.io.crypto.aes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.commons.crypto.stream.CryptoOutputStream;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
|
|
|
@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.io.compress.Compressor;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* Encapsulates a data block compressed using a particular encoding algorithm.
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
import org.apache.hadoop.io.compress.CompressionOutputStream;
|
||||
import org.apache.hadoop.io.compress.Compressor;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* A default implementation of {@link HFileBlockEncodingContext}. It will
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
package org.apache.hadoop.hbase.io.hadoopbackport;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InterruptedIOException;
|
||||
|
|
|
@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/**
|
||||
* WALDictionary using an LRU eviction algorithm. Uses a linked list running
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.hbase.nio.ByteBuff;
|
||||
import org.apache.hadoop.hbase.util.Pair;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
|
||||
/*
|
||||
* It seems like as soon as somebody sets himself to the task of creating VInt encoding, his mind
|
||||
|
|
|
@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.net;
|
|||
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
|
||||
import com.google.common.net.HostAndPort;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort;
|
||||
|
||||
/**
|
||||
* An immutable type to hold a hostname and port combo, like an Endpoint
|
||||
|
@ -46,7 +46,7 @@ public class Address implements Comparable<Address> {
|
|||
}
|
||||
|
||||
public String getHostname() {
|
||||
return this.hostAndPort.getHostText();
|
||||
return this.hostAndPort.getHost();
|
||||
}
|
||||
|
||||
public int getPort() {
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
|||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.ObjectIntPair;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* Provides a unified view of all the underlying ByteBuffers and will look as if a bigger
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.nio;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
|
|
@ -28,7 +28,7 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.util.Methods;
|
||||
|
|
|
@ -24,13 +24,13 @@ import java.util.concurrent.Callable;
|
|||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
import com.google.common.util.concurrent.ListeningExecutorService;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
||||
import org.apache.hadoop.hbase.BaseConfigurable;
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.util;
|
|||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* Extends the basic {@link SimpleByteRange} implementation with position
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue