From f9f869f60a334982b739420a614b5d44adf26af0 Mon Sep 17 00:00:00 2001
From: Peter Somogyi
Date: Mon, 11 Dec 2017 13:38:44 +0100
Subject: [PATCH] HBASE-19497 Fix findbugs and error-prone warnings in
 hbase-common (branch-2)

Signed-off-by: Apekshit Sharma
---
 .../apache/hadoop/hbase/ArrayBackedTag.java        |   6 +-
 .../hadoop/hbase/AsyncConsoleAppender.java         |   3 +-
 .../org/apache/hadoop/hbase/AuthUtil.java          |   2 +-
 .../apache/hadoop/hbase/BaseConfigurable.java      |   2 +-
 .../hbase/ByteBufferKeyOnlyKeyValue.java           |   2 +-
 .../hadoop/hbase/ByteBufferKeyValue.java           |   3 +-
 .../apache/hadoop/hbase/ByteBufferTag.java         |   2 +-
 .../hadoop/hbase/CellComparatorImpl.java           |   4 +-
 .../org/apache/hadoop/hbase/CellScanner.java       |   1 -
 .../org/apache/hadoop/hbase/CellUtil.java          |   1 +
 .../org/apache/hadoop/hbase/ChoreService.java      |   3 +-
 .../hadoop/hbase/CompoundConfiguration.java        |   2 +-
 .../org/apache/hadoop/hbase/ExtendedCell.java      |   1 +
 .../hadoop/hbase/HBaseConfiguration.java           |   2 +-
 ...JitterScheduledThreadPoolExecutorImpl.java      |   7 +-
 .../org/apache/hadoop/hbase/KeyValue.java          |  19 +--
 .../apache/hadoop/hbase/KeyValueTestUtil.java      |   2 +-
 .../org/apache/hadoop/hbase/KeyValueUtil.java      |   4 +-
 .../hadoop/hbase/NamespaceDescriptor.java          |   2 +-
 .../hbase/NoTagsByteBufferKeyValue.java            |   2 +-
 .../apache/hadoop/hbase/NoTagsKeyValue.java        |   4 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java       |   8 +-
 .../org/apache/hadoop/hbase/ServerName.java        |   2 +-
 .../hadoop/hbase/SizeCachedKeyValue.java           |   2 +-
 .../org/apache/hadoop/hbase/TableName.java         |   2 +-
 .../apache/hadoop/hbase/codec/CellCodec.java       |   2 +-
 .../hadoop/hbase/codec/CellCodecWithTags.java      |   3 +-
 .../org/apache/hadoop/hbase/codec/Codec.java       |   2 +-
 .../hadoop/hbase/codec/KeyValueCodec.java          |   5 +-
 .../hbase/codec/KeyValueCodecWithTags.java         |   5 +-
 .../exceptions/UnexpectedStateException.java       |   2 +-
 .../hbase/filter/ByteArrayComparable.java          |   2 +-
 .../hbase/io/ByteArrayOutputStream.java            |   2 +-
 .../hadoop/hbase/io/ByteBuffInputStream.java       |   6 +-
 .../hbase/io/ByteBufferInputStream.java            |   2 +-
 .../hbase/io/ByteBufferListOutputStream.java       |   2 +-
 .../hbase/io/ByteBufferOutputStream.java           |   4 +-
 .../hadoop/hbase/io/ByteBufferPool.java            |   4 +-
 .../io/ByteBufferWriterDataOutputStream.java       |   2 +-
 .../io/ByteBufferWriterOutputStream.java           |   3 +-
 .../hbase/io/ImmutableBytesWritable.java           |   5 +-
 .../hbase/io/TagCompressionContext.java            |   2 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java      |   2 +-
 .../hadoop/hbase/io/compress/Compression.java      |  14 +-
 .../io/compress/ReusableStreamGzipCodec.java       |   2 +-
 .../hadoop/hbase/io/crypto/Context.java            |   2 +-
 .../hbase/io/crypto/CryptoCipherProvider.java      |   2 +-
 .../io/crypto/DefaultCipherProvider.java           |   2 +-
 .../hadoop/hbase/io/crypto/Encryption.java         |   3 +-
 .../hadoop/hbase/io/crypto/aes/AES.java            |   5 +-
 .../hbase/io/crypto/aes/AESDecryptor.java          |   3 +-
 .../hbase/io/crypto/aes/AESEncryptor.java          |   3 +-
 .../hbase/io/crypto/aes/CommonsCryptoAES.java      |   5 +-
 .../crypto/aes/CommonsCryptoAESDecryptor.java      |  15 +-
 .../crypto/aes/CommonsCryptoAESEncryptor.java      |  15 +-
 .../hadoop/hbase/io/crypto/aes/CryptoAES.java      |  22 +--
 .../io/encoding/AbstractDataBlockEncoder.java      |   2 +-
 .../io/encoding/BufferedDataBlockEncoder.java      |   1 +
 .../hbase/io/encoding/CompressionState.java        |   2 +-
 .../io/encoding/CopyKeyDataBlockEncoder.java       |   2 +-
 .../hbase/io/encoding/DataBlockEncoder.java        |   2 +-
 .../hbase/io/encoding/DataBlockEncoding.java       |  19 +--
 .../io/encoding/DiffKeyDeltaEncoder.java           |   6 +-
 .../hbase/io/encoding/EncodedDataBlock.java        |  10 +-
 .../io/encoding/FastDiffDeltaEncoder.java          |   6 +-
 .../encoding/HFileBlockDecodingContext.java        |   2 +-
 .../HFileBlockDefaultDecodingContext.java          |   2 +-
 .../HFileBlockDefaultEncodingContext.java          |   2 +-
 .../encoding/HFileBlockEncodingContext.java        |   2 +-
 .../hadoop/hbase/io/encoding/NoneEncoder.java      |   4 +-
 .../io/encoding/PrefixKeyDeltaEncoder.java         |   4 +-
 .../hbase/io/encoding/RowIndexCodecV1.java         |   2 +-
 .../hbase/io/encoding/RowIndexEncoderV1.java       |   3 +-
 .../hbase/io/encoding/RowIndexSeekerV1.java        |   8 +-
 .../hadoopbackport/ThrottledInputStream.java       |   5 +-
 .../hadoop/hbase/io/hfile/BlockType.java           |   2 +-
 .../hadoop/hbase/io/hfile/HFileContext.java        |   3 +-
 .../hbase/io/hfile/HFileContextBuilder.java        |   2 +-
 .../hadoop/hbase/io/util/Dictionary.java           |   2 +-
 .../hadoop/hbase/io/util/LRUDictionary.java        |   2 +-
 .../hadoop/hbase/io/util/StreamUtils.java          |   2 +-
 .../org/apache/hadoop/hbase/nio/ByteBuff.java      |   2 +-
 .../hadoop/hbase/nio/MultiByteBuff.java            |  25 +--
 .../hadoop/hbase/nio/SingleByteBuff.java           |   7 +-
 .../hadoop/hbase/rsgroup/RSGroupInfo.java          |   2 +-
 .../hadoop/hbase/security/Superusers.java          |   8 +-
 .../apache/hadoop/hbase/security/User.java         |   6 +-
 .../hadoop/hbase/security/UserProvider.java        |  21 +--
 .../hadoop/hbase/trace/SpanReceiverHost.java       |   2 +-
 .../hbase/types/CopyOnWriteArrayMap.java           |  15 +-
 .../apache/hadoop/hbase/types/DataType.java        |   2 +-
 .../hbase/types/FixedLengthWrapper.java            |  34 ++--
 .../hadoop/hbase/types/OrderedBlob.java            |  14 +-
 .../hadoop/hbase/types/OrderedBlobVar.java         |  10 +-
 .../hadoop/hbase/types/OrderedBytesBase.java       |  22 ++-
 .../hadoop/hbase/types/OrderedFloat32.java         |  23 ++-
 .../hadoop/hbase/types/OrderedFloat64.java         |  23 ++-
 .../hadoop/hbase/types/OrderedInt16.java           |  23 ++-
 .../hadoop/hbase/types/OrderedInt32.java           |  23 ++-
 .../hadoop/hbase/types/OrderedInt64.java           |  23 ++-
 .../hadoop/hbase/types/OrderedInt8.java            |  23 ++-
 .../hadoop/hbase/types/OrderedNumeric.java         |  10 +-
 .../hadoop/hbase/types/OrderedString.java          |  10 +-
 .../org/apache/hadoop/hbase/types/PBType.java      |   7 +-
 .../apache/hadoop/hbase/types/RawByte.java         |  26 ++-
 .../apache/hadoop/hbase/types/RawBytes.java        |  35 ++--
 .../hbase/types/RawBytesFixedLength.java           |   2 +-
 .../hbase/types/RawBytesTerminated.java            |   2 +-
 .../apache/hadoop/hbase/types/RawDouble.java       |  26 ++-
 .../apache/hadoop/hbase/types/RawFloat.java        |  26 ++-
 .../apache/hadoop/hbase/types/RawInteger.java      |  26 ++-
 .../apache/hadoop/hbase/types/RawLong.java         |  26 ++-
 .../apache/hadoop/hbase/types/RawShort.java        |  26 ++-
 .../apache/hadoop/hbase/types/RawString.java       |  35 ++--
 .../hbase/types/RawStringFixedLength.java          |   2 +-
 .../hbase/types/RawStringTerminated.java           |   2 +-
 .../org/apache/hadoop/hbase/types/Struct.java      |  50 ++++--
 .../hadoop/hbase/types/StructBuilder.java          |  14 +-
 .../hadoop/hbase/types/StructIterator.java         |  22 ++-
 .../hadoop/hbase/types/TerminatedWrapper.java      |  45 ++++--
 .../org/apache/hadoop/hbase/types/Union2.java      |   6 +-
 .../org/apache/hadoop/hbase/types/Union3.java      |   6 +-
 .../org/apache/hadoop/hbase/types/Union4.java      |   6 +-
 .../hadoop/hbase/util/AbstractByteRange.java       |   6 +-
 .../hadoop/hbase/util/AbstractHBaseTool.java       |   2 +-
 .../org/apache/hadoop/hbase/util/Base64.java       |   9 +-
 .../hadoop/hbase/util/ByteBufferArray.java         |   9 +-
 .../hadoop/hbase/util/ByteBufferUtils.java         |  30 ++--
 .../org/apache/hadoop/hbase/util/Bytes.java        |  16 +-
 .../hadoop/hbase/util/ChecksumType.java            |   2 +-
 .../hadoop/hbase/util/ClassLoaderBase.java         |   3 +-
 .../hadoop/hbase/util/CommonFSUtils.java           |   9 +-
 .../hbase/util/CoprocessorClassLoader.java         |   6 +-
 .../apache/hadoop/hbase/util/HasThread.java        |   3 +-
 .../org/apache/hadoop/hbase/util/Hash.java         |  18 +--
 .../apache/hadoop/hbase/util/JRubyFormat.java      |   1 +
 .../apache/hadoop/hbase/util/JSONBean.java         |   8 +-
 .../hadoop/hbase/util/JSONMetricUtil.java          |   1 -
 .../org/apache/hadoop/hbase/util/JVM.java          |   7 +-
 .../hadoop/hbase/util/OrderedBytes.java            |  26 +--
 .../hadoop/hbase/util/PrettyPrinter.java           |   2 +-
 .../hadoop/hbase/util/ReflectionUtils.java         |   6 +-
 .../hbase/util/RetryCounterFactory.java            |   2 +-
 .../hadoop/hbase/util/RowBloomHashKey.java         |   1 -
 .../hadoop/hbase/util/RowColBloomHashKey.java      |   3 +-
 .../hadoop/hbase/util/SoftObjectPool.java          |   3 +-
 .../org/apache/hadoop/hbase/util/Threads.java      |   7 +-
 .../org/apache/hadoop/hbase/util/Triple.java       |   6 +-
 .../hadoop/hbase/util/UnsafeAccess.java            |   9 +-
 .../hadoop/hbase/util/WeakObjectPool.java          |   1 -
 .../hadoop/hbase/zookeeper/ZKConfig.java           |   3 +-
 .../org/apache/hadoop/hbase/ClassFinder.java       |   2 +-
 .../hbase/HBaseCommonTestingUtility.java           |   3 +-
 .../hbase/ResourceCheckerJUnitListener.java        |  24 ++-
 .../hadoop/hbase/TestByteBufferKeyValue.java       |  14 +-
 .../apache/hadoop/hbase/TestCellBuilder.java       |   4 +-
 .../hadoop/hbase/TestCellComparator.java           |  24 +--
 .../org/apache/hadoop/hbase/TestCellUtil.java      |  44 ++++--
 .../apache/hadoop/hbase/TestChoreService.java      |  49 +++---
 .../apache/hadoop/hbase/TestClassFinder.java       |   6 +-
 .../hbase/TestCompoundConfiguration.java           |  47 ++++--
 .../hadoop/hbase/TestHBaseConfiguration.java       |   7 +-
 .../hbase/TestIndividualBytesFieldCell.java        |  26 +--
 .../org/apache/hadoop/hbase/TestKeyValue.java      |   7 +-
 .../apache/hadoop/hbase/TestTableName.java         |  77 +++++----
 .../org/apache/hadoop/hbase/TestTimeout.java       |   2 +-
 .../java/org/apache/hadoop/hbase/Waiter.java       |   4 +-
 .../hbase/codec/TestCellCodecWithTags.java         |   5 +-
 .../hadoop/hbase/codec/TestKeyValueCodec.java      |   4 +-
 .../codec/TestKeyValueCodecWithTags.java           |   5 +-
 .../io/TestByteBufferListOutputStream.java         |   1 +
 .../hadoop/hbase/io/TestByteBufferPool.java        |   4 +-
 .../hbase/io/TestTagCompressionContext.java        |   6 +-
 .../io/crypto/KeyProviderForTesting.java           |   1 -
 .../hbase/io/crypto/TestCipherProvider.java        |   6 +-
 .../hbase/io/crypto/TestEncryption.java            |   1 -
 .../hbase/io/crypto/TestKeyProvider.java           |   6 +-
 .../io/crypto/TestKeyStoreKeyProvider.java         |   4 +-
 .../hadoop/hbase/io/crypto/aes/TestAES.java        |   9 +-
 .../hbase/io/crypto/aes/TestCommonsAES.java        |  32 ++--
 .../TestThrottledInputStream.java                  |   3 +-
 .../hbase/io/util/TestLRUDictionary.java           |   2 +-
 .../hadoop/hbase/nio/TestMultiByteBuff.java        |  17 +-
 .../hbase/types/TestCopyOnWriteMaps.java           |  34 ++--
 .../apache/hadoop/hbase/types/TestStruct.java      | 149 ++++++++++++++----
 .../hbase/util/AbstractHBaseToolTest.java          |  19 ++-
 .../hbase/util/ClassLoaderTestHelper.java          |   6 +-
 .../hbase/util/RedundantKVGenerator.java           |   4 +-
 .../apache/hadoop/hbase/util/TestAvlUtil.java      |  18 +--
 .../apache/hadoop/hbase/util/TestBase64.java       |   1 -
 .../hbase/util/TestByteBufferArray.java            |   8 +-
 .../hbase/util/TestByteBufferUtils.java            |   4 +-
 .../TestByteRangeWithKVSerialization.java          |   2 +-
 .../apache/hadoop/hbase/util/TestBytes.java        |  38 +++--
 .../hadoop/hbase/util/TestCommonFSUtils.java       |  14 +-
 .../hbase/util/TestConcatenatedLists.java          |   2 +
 .../util/TestCoprocessorClassLoader.java           |  20 ++-
 .../hbase/util/TestDynamicClassLoader.java         |   2 +-
 .../hadoop/hbase/util/TestJRubyFormat.java         |   3 +-
 .../hbase/util/TestLoadTestKVGenerator.java        |   9 +-
 .../hadoop/hbase/util/TestOrderedBytes.java        |  56 ++++---
 .../hadoop/hbase/util/TestVersionInfo.java         |   3 +-
 .../hadoop/hbase/zookeeper/TestZKConfig.java       |   6 +-
 203 files changed, 1286 insertions(+), 808 deletions(-)
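
[Editor's note: most of the churn in this patch is import reordering to satisfy the
checkstyle/error-prone import rules: java/javax imports first, then third-party imports
sorted alphabetically (which moves org.apache.yetus below org.apache.hadoop), with the
shaded Guava imports in their own trailing group. A minimal sketch of the target layout,
using a hypothetical class, is:

    package org.apache.hadoop.hbase.example;

    import java.io.IOException;                            // JDK imports first
    import java.nio.ByteBuffer;

    import org.apache.hadoop.conf.Configuration;           // third-party, alphabetized
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.yetus.audience.InterfaceAudience;    // yetus now sorts after hadoop

    import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;  // shaded last

    @InterfaceAudience.Private
    public class ImportOrderExample {
    }

The remaining hunks fall into a few repeated patterns: adding missing @Override
annotations, widening int arithmetic to long before it can overflow, and making lock
fields final; each is illustrated where it first appears below.]
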
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
index 12f7a1541a5..142fc4f89ed 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase;
 
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * This is a {@link Tag} implementation in which value is backed by an on heap byte array.
@@ -100,6 +100,7 @@ public class ArrayBackedTag implements Tag {
   /**
    * @return The byte array backing this Tag.
    */
+  @Override
   public byte[] getValueArray() {
     return this.bytes;
   }
@@ -107,6 +108,7 @@ public class ArrayBackedTag implements Tag {
   /**
    * @return the tag type
    */
+  @Override
   public byte getType() {
     return this.type;
   }
@@ -114,6 +116,7 @@ public class ArrayBackedTag implements Tag {
   /**
    * @return Length of actual tag bytes within the backed buffer
    */
+  @Override
   public int getValueLength() {
     return this.length - INFRASTRUCTURE_SIZE;
   }
@@ -121,6 +124,7 @@ public class ArrayBackedTag implements Tag {
   /**
    * @return Offset of actual tag bytes within the backed buffer
    */
+  @Override
   public int getValueOffset() {
     return this.offset + INFRASTRUCTURE_SIZE;
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
index c194c808289..22f5ec2a0d4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.PatternLayout;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Logger class that buffers before trying to log to the specified console.
@@ -40,6 +40,7 @@ public class AsyncConsoleAppender extends AsyncAppender {
     consoleAppender.setTarget(value);
   }
 
+  @Override
   public void activateOptions() {
     consoleAppender.activateOptions();
     super.activateOptions();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
index 7c9578cf087..fcfdee84550 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
@@ -24,11 +24,11 @@ import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Utility methods for helping with security tasks. Downstream users
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
index 51e4684a6b1..28cda209f97 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * HBase version of Hadoop's Configured class that doesn't initialize the
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
index 8a4b7206a6c..3522e2d34c7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase;
 
 import java.nio.ByteBuffer;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a key only Cell implementation which is identical to {@link KeyValue.KeyOnlyKeyValue}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
index cd663129bc9..beadaf6f3a9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
@@ -21,10 +21,10 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
@@ -154,6 +154,7 @@ public class ByteBufferKeyValue extends ByteBufferCell implements ExtendedCell {
     return this.seqId;
   }
 
+  @Override
   public void setSequenceId(long seqId) {
     this.seqId = seqId;
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferTag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferTag.java
index 3b27fc0e5cc..21289d1a370 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferTag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferTag.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase;
 
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
 
 /**
  * This is a {@link Tag} implementation in which value is backed by
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
index 264984a483d..2dd1bdb79a5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
index 2aabb832eab..9d0b8d82eaa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
 import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 
 /**
  * An interface for iterating through a sequence of cells. Similar to Java's Iterator, but without
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 0940e6395b4..3a8307cb534 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -34,6 +34,7 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.Optional;
+
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
index 052c728019c..ff437db5fc5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
@@ -27,12 +27,13 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 /**
  * ChoreService is a service that can be used to schedule instances of {@link ScheduledChore} to run
  * periodically while sharing threads. The ChoreService is backed by a
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
index b597fe6ca12..b9d4a3e4153 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
@@ -30,8 +30,8 @@ import java.util.Map;
 
 import org.apache.commons.collections4.iterators.UnmodifiableIterator;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Do a shallow merge of multiple KV configuration pools. This is a very useful
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
index 36b07a81b7a..81ca0189777 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.yetus.audience.InterfaceAudience;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
index 4ac76c5f4a6..6012fe85f81 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
@@ -25,9 +25,9 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.hadoop.hbase.zookeeper.ZKConfig;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Adds HBase configuration files to a Configuration
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
index 0f7cd7a2b65..f9cdf1ea2a1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
@@ -19,8 +19,6 @@
 
 package org.apache.hadoop.hbase;
 
-import org.apache.yetus.audience.InterfaceAudience;
-
 import java.util.concurrent.Callable;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.ExecutionException;
@@ -31,6 +29,8 @@ import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * ScheduledThreadPoolExecutor that will add some jitter to the RunnableScheduledFuture.getDelay.
 *
@@ -51,12 +51,13 @@ public class JitterScheduledThreadPoolExecutorImpl extends ScheduledThreadPoolEx
     this.spread = spread;
   }
 
+  @Override
   protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
       Runnable runnable, java.util.concurrent.RunnableScheduledFuture<V> task) {
     return new JitteredRunnableScheduledFuture<>(task);
   }
-
+  @Override
   protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
       Callable<V> callable, java.util.concurrent.RunnableScheduledFuture<V> task) {
     return new JitteredRunnableScheduledFuture<>(task);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 727eede7046..7093650af82 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -34,12 +34,13 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * An HBase Key/Value. This is the fundamental HBase Type.
 *
@@ -79,7 +80,7 @@ public class KeyValue implements ExtendedCell {
 
   private static final Log LOG = LogFactory.getLog(KeyValue.class);
 
-  public static final long FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
+  public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
       ClassSize.REFERENCE + // pointer to "bytes"
       2 * Bytes.SIZEOF_INT + // offset, length
       Bytes.SIZEOF_LONG;// memstoreTS
@@ -195,9 +196,9 @@ public class KeyValue implements ExtendedCell {
    */
   public static long getKeyValueDataStructureSize(int klength, int vlength, int tagsLength) {
     if (tagsLength == 0) {
-      return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
+      return (long) KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
     }
-    return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
+    return (long) KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
   }
 
   /**
@@ -211,7 +212,7 @@ public class KeyValue implements ExtendedCell {
    * @return the key data structure length
    */
   public static long getKeyDataStructureSize(int rlength, int flength, int qlength) {
-    return KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
+    return (long) KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
   }
 
   /**
@@ -1524,6 +1525,7 @@ public class KeyValue implements ExtendedCell {
    * Returns any tags embedded in the KeyValue. Used in testcases.
    * @return The tags
    */
+  @Override
   public List<Tag> getTags() {
     int tagsLength = getTagsLength();
     if (tagsLength == 0) {
@@ -2282,7 +2284,7 @@ public class KeyValue implements ExtendedCell {
     int length = kv.getLength();
     out.writeInt(length);
     out.write(kv.getBuffer(), kv.getOffset(), length);
-    return length + Bytes.SIZEOF_INT;
+    return (long) length + Bytes.SIZEOF_INT;
   }
 
   /**
@@ -2304,7 +2306,7 @@ public class KeyValue implements ExtendedCell {
   public static long oswrite(final KeyValue kv, final OutputStream out, final boolean withTags)
       throws IOException {
     ByteBufferUtils.putInt(out, kv.getSerializedSize(withTags));
-    return kv.write(out, withTags) + Bytes.SIZEOF_INT;
+    return (long) kv.write(out, withTags) + Bytes.SIZEOF_INT;
   }
 
   @Override
@@ -2350,13 +2352,12 @@ public class KeyValue implements ExtendedCell {
    */
   @Override
   public long heapSize() {
-    long sum = FIXED_OVERHEAD;
     /*
     * Deep object overhead for this KV consists of two parts. The first part is the KV object
     * itself, while the second part is the backing byte[]. We will only count the array overhead
     * from the byte[] only if this is the first KV in there.
     */
-    return ClassSize.align(sum) +
+    return ClassSize.align(FIXED_OVERHEAD) +
       (offset == 0
         ? ClassSize.sizeOfByteArray(length) // count both length and object overhead
         : length);                          // only count the number of bytes
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
index 7467d67ca0e..3d9b4f92ea1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
@@ -22,10 +22,10 @@ import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IterableUtils;
 import org.apache.hadoop.hbase.util.Strings;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index 6fd37c0e286..60e404925b7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -29,13 +29,13 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IterableUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Function;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@@ -515,7 +515,7 @@ public class KeyValueUtil {
     int length = kv.getLength();
     out.writeInt(length);
     out.write(kv.getBuffer(), kv.getOffset(), length);
-    return length + Bytes.SIZEOF_INT;
+    return (long) length + Bytes.SIZEOF_INT;
   }
 
   /**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
index 94b719c4d93..d1b69ba16c5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -26,8 +26,8 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Namespace POJO class. Used to represent and define namespaces.
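
[Editor's note: the (long) casts above fix error-prone's integer-overflow warning. In an
expression such as length + Bytes.SIZEOF_INT both operands are int, so the addition is
performed in 32-bit arithmetic and can wrap around before the result is widened to the
method's long return type; casting one operand first forces 64-bit arithmetic. A
self-contained sketch of the difference:

    public class WideningCastDemo {
      public static void main(String[] args) {
        int length = Integer.MAX_VALUE;
        // int + int: the sum wraps before it is widened to long.
        long wrong = length + 4;         // -2147483645
        // (long) int + int: the addition itself is done in 64 bits.
        long right = (long) length + 4;  // 2147483651
        System.out.println(wrong + " vs " + right);
      }
    }

The same reasoning is behind the long-to-int changes to constants like FIXED_OVERHEAD and
HEAP_SIZE_OVERHEAD: their initializers are sums of int terms, so declaring them long only
hid the fact that the arithmetic was already 32-bit.]
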
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.java
index 82b243b5af8..f00961f6660 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase;
 
 import java.nio.ByteBuffer;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * An extension of the ByteBufferKeyValue where the tags length is always 0
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsKeyValue.java
index 088aff57557..3f8ef546548 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NoTagsKeyValue.java
@@ -22,11 +22,11 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.io.OutputStream;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
- * An extension of the KeyValue where the tags length is always 0 
+ * An extension of the KeyValue where the tags length is always 0
 */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
index 79b8b314e2b..df080f3630c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
@@ -135,7 +135,7 @@ public final class PrivateCellUtil {
   static class TagRewriteCell implements ExtendedCell {
     protected Cell cell;
     protected byte[] tags;
-    private static final long HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE;
+    private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE;
 
     /**
      * @param cell The original Cell which it rewrites
@@ -317,7 +317,7 @@ public final class PrivateCellUtil {
     protected ByteBufferCell cell;
     protected byte[] tags;
-    private static final long HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE;
+    private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE;
 
     /**
      * @param cell The original ByteBufferCell which it rewrites
@@ -1501,7 +1501,7 @@ public final class PrivateCellUtil {
   }
 
   private static class FirstOnRowCell extends EmptyCell {
-    private static final long FIXED_HEAPSIZE =
+    private static final int FIXED_HEAPSIZE =
         ClassSize.OBJECT // object
       + ClassSize.REFERENCE // row array
       + Bytes.SIZEOF_INT // row offset
@@ -1829,7 +1829,7 @@ public final class PrivateCellUtil {
   }
 
   private static class LastOnRowCell extends EmptyCell {
-    private static final long FIXED_OVERHEAD =
+    private static final int FIXED_OVERHEAD =
         ClassSize.OBJECT // object
       + ClassSize.REFERENCE // row array
       + Bytes.SIZEOF_INT // row offset
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
index b00b7073638..10f658c2320 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -24,10 +24,10 @@ import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.net.InetAddresses;
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/SizeCachedKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/SizeCachedKeyValue.java
index 383446cb12e..aa649c7cdf9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/SizeCachedKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/SizeCachedKeyValue.java
@@ -19,8 +19,8 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This class is an extension to KeyValue where rowLen and keyLen are cached.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index 74a4c8a3600..5a437947917 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Immutable POJO class for representing a table name.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
index ef7df270dd5..8a1b9af88fb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
@@ -27,10 +27,10 @@ import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index 95b7fe81b41..f33c45c224a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -27,10 +27,10 @@ import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Basic Cell codec that just writes out all the individual elements of a Cell including the tags.
@@ -85,6 +85,7 @@ public class CellCodecWithTags implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       byte[] row = readByteArray(this.in);
       byte[] family = readByteArray(in);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
index bbbdcf6133f..01359eab2d6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
@@ -22,9 +22,9 @@ import java.io.OutputStream;
 
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Encoder/Decoder for Cell.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index d2de33189ea..9a5db3c6409 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -27,13 +27,13 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.NoTagsByteBufferKeyValue;
 import org.apache.hadoop.hbase.NoTagsKeyValue;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Codec that does KeyValue version 1 serialization.
- * 
+ *
  * <p>Encodes Cell as serialized in KeyValue with total length prefix.
 * This is how KVs were serialized in Puts, Deletes and Results pre-0.96. Its what would
 * happen if you called the Writable#write KeyValue implementation. This encoder will fail
@@ -69,6 +69,7 @@ public class KeyValueCodec implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       // No tags here
       return KeyValueUtil.iscreate(in, false);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
index fe5ad5a4d41..bf9b375655b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
@@ -22,14 +22,14 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Codec that does KeyValue version 1 serialization with serializing tags also.
@@ -75,6 +75,7 @@ public class KeyValueCodecWithTags implements Codec {
       super(in);
     }
 
+    @Override
     protected Cell parseCell() throws IOException {
       // create KeyValue with tags
       return KeyValueUtil.iscreate(in, true);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/UnexpectedStateException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/UnexpectedStateException.java
index 575b961bf69..e776acc18f7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/UnexpectedStateException.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/UnexpectedStateException.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.exceptions;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public class UnexpectedStateException extends HBaseIOException {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
index 82841fe7763..f0258f56726 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.filter;
 
 import java.nio.ByteBuffer;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /** Base class for byte array comparators */
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteArrayOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteArrayOutputStream.java
index bd371daf9f6..38c986a49d7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteArrayOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteArrayOutputStream.java
@@ -22,9 +22,9 @@ import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Our own implementation of ByteArrayOutputStream where all methods are NOT synchronized and
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffInputStream.java
index d7923396f3f..152d0dbe805 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffInputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffInputStream.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io;
 
 import java.io.InputStream;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Not thread safe!
@@ -42,6 +42,7 @@
    * because the end of the stream has been reached, the value -1 is returned.
    * @return the next byte of data, or -1 if the end of the stream has been reached.
    */
+  @Override
   public int read() {
     if (this.buf.hasRemaining()) {
       return (this.buf.get() & 0xff);
@@ -58,6 +59,7 @@
    * @return the total number of bytes actually read into the buffer, or -1 if not even
    *         1 byte can be read because the end of the stream has been reached.
    */
+  @Override
   public int read (byte b[], int off, int len) {
     int avail = available();
     if (avail <= 0) {
@@ -81,6 +83,7 @@
    * @param n the number of bytes to be skipped.
    * @return the actual number of bytes skipped.
    */
+  @Override
   public long skip(long n) {
     long k = Math.min(n, available());
     if (k <= 0) {
@@ -94,6 +97,7 @@
    * @return the number of remaining bytes that can be read (or skipped
    *         over) from this input stream.
    */
+  @Override
   public int available() {
     return this.buf.remaining();
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferInputStream.java
index acb8a1c1e9c..2811b1b8155 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferInputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferInputStream.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.io;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Not thread safe!
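
[Editor's note: the @Override annotations added throughout this patch are behavior-neutral;
they ask the compiler to verify that a method really overrides a supertype method, which is
what error-prone's MissingOverride check enforces. The payoff is that signature drift
becomes a compile error instead of a silent overload, as in this hypothetical sketch:

    public class OverrideDemo {
      static class Base {
        int read(byte[] b, int off, int len) { return -1; }
      }

      static class Sub extends Base {
        @Override
        int read(byte[] b, int off, int len) { return 0; }  // OK: signature matches

        // Annotating a near-miss such as read(byte[] b, long off, int len) with
        // @Override would fail to compile instead of quietly adding an overload.
      }
    }
]
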
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
index 08b4eea751e..61cc170dcec 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
@@ -25,8 +25,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * An OutputStream which writes data into ByteBuffers. It will try to get ByteBuffer, as and when
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
index c72394e7b60..9bdad534df0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
@@ -27,9 +27,9 @@ import java.nio.ByteOrder;
 import java.nio.channels.Channels;
 import java.nio.channels.WritableByteChannel;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Not thread safe!
@@ -142,6 +142,7 @@ public class ByteBufferOutputStream extends OutputStream
     ByteBufferUtils.copyFromArrayToBuffer(curBuf, b, off, len);
   }
 
+  @Override
   public void write(ByteBuffer b, int off, int len) throws IOException {
     checkSizeAndGrow(len);
     ByteBufferUtils.copyFromBufferToBuffer(b, curBuf, off, len);
@@ -153,6 +154,7 @@
    * @param i the int to write
    * @throws IOException if an I/O error occurs.
    */
+  @Override
   public void writeInt(int i) throws IOException {
     checkSizeAndGrow(Bytes.SIZEOF_INT);
     ByteBufferUtils.putInt(this.curBuf, i);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
index 82ce52d4e28..784c88fcf6f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
@@ -22,12 +22,12 @@ import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 /**
  * Like Hadoops' ByteBufferPool only you do not specify desired size when getting a ByteBuffer. This
This * pool keeps an upper bound on the count of ByteBuffers in the pool and a fixed size of ByteBuffer diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterDataOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterDataOutputStream.java index 83635d61202..c085abc132a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterDataOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterDataOutputStream.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Our extension of DataOutputStream which implements ByteBufferWriter diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java index 537e4bd363a..6d46fa84c0b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java @@ -21,9 +21,9 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * When deal with OutputStream which is not ByteBufferWriter type, wrap it with this class. We will @@ -74,6 +74,7 @@ public class ByteBufferWriterOutputStream extends OutputStream this.os.write(b); } + @Override public void write(byte b[], int off, int len) throws IOException { this.os.write(b, off, len); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java index 1384e81cb6e..949e905a69c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java @@ -24,10 +24,10 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; +import org.apache.yetus.audience.InterfaceAudience; /** * A byte sequence that is usable as a key or value. Based on @@ -146,6 +146,7 @@ implements WritableComparable { return this.offset; } + @Override public void readFields(final DataInput in) throws IOException { this.length = in.readInt(); this.bytes = new byte[this.length]; @@ -153,6 +154,7 @@ implements WritableComparable { this.offset = 0; } + @Override public void write(final DataOutput out) throws IOException { out.writeInt(this.length); out.write(this.bytes, this.offset, this.length); @@ -173,6 +175,7 @@ implements WritableComparable { * @return Positive if left is bigger than right, 0 if they are equal, and * negative if left is smaller than right. 
   */
+  @Override
   public int compareTo(ImmutableBytesWritable that) {
     return WritableComparator.compareBytes(
       this.bytes, this.offset, this.length,
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
index 32997e0d554..d646250c6ac 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
@@ -26,13 +26,13 @@ import java.lang.reflect.InvocationTargetException;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.Tag;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.util.Dictionary;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Context that holds the dictionary for Tag compression and doing the compress/uncompress. This
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
index fee652a7a63..e4503463571 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hbase.io;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Represents an interval of version timestamps. Presumes timestamps between
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 41f4fb8ab8b..ec4ce38d3a7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
@@ -39,6 +38,7 @@ import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.DoNotPool;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Compression related stuff.
@@ -104,7 +104,7 @@ public final class Compression {
   LZO("lzo") {
     // Use base type to avoid compile-time dependencies.
     private volatile transient CompressionCodec lzoCodec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     CompressionCodec getCodec(Configuration conf) {
@@ -131,7 +131,7 @@ public final class Compression {
   },
   GZ("gz") {
     private volatile transient GzipCodec codec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     DefaultCodec getCodec(Configuration conf) {
@@ -183,7 +183,7 @@
   SNAPPY("snappy") {
     // Use base type to avoid compile-time dependencies.
     private volatile transient CompressionCodec snappyCodec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     CompressionCodec getCodec(Configuration conf) {
@@ -210,7 +210,7 @@
   LZ4("lz4") {
     // Use base type to avoid compile-time dependencies.
     private volatile transient CompressionCodec lz4Codec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     CompressionCodec getCodec(Configuration conf) {
@@ -237,7 +237,7 @@
   BZIP2("bzip2") {
     // Use base type to avoid compile-time dependencies.
     private volatile transient CompressionCodec bzipCodec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     CompressionCodec getCodec(Configuration conf) {
@@ -264,7 +264,7 @@
   ZSTD("zstd") {
     // Use base type to avoid compile-time dependencies.
     private volatile transient CompressionCodec zStandardCodec;
-    private transient Object lock = new Object();
+    private final transient Object lock = new Object();
 
     @Override
     CompressionCodec getCodec(Configuration conf) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
index 39a76625363..3db9d7ec440 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
@@ -24,12 +24,12 @@ import java.util.zip.GZIPOutputStream;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.JVM;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression
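
[Editor's note: findbugs flags synchronization on a non-final field because the reference
can be reassigned, letting two threads lock different objects and race through the lazy
codec initialization. With a final lock and a volatile codec field, the double-checked
lazy-init pattern these codec enums use is safe. A condensed sketch, with hypothetical
names and Object standing in for the codec type:

    public class LazyCodecHolder {
      private volatile Object codec;             // volatile: safe publication
      private final Object lock = new Object();  // final: always the same monitor

      Object getCodec() {
        if (codec == null) {            // fast path, no locking
          synchronized (lock) {
            if (codec == null) {        // re-check under the lock
              codec = new Object();     // stands in for building the real codec
            }
          }
        }
        return codec;
      }
    }
]
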
org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java index 3bc66fb3f45..2ed422c5497 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAES; +import org.apache.yetus.audience.InterfaceAudience; /** * The default cipher provider. Supports AES via the Commons Crypto. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java index 896e20fcddd..6e36d5adcfe 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.crypto.aes.AES; +import org.apache.yetus.audience.InterfaceAudience; /** * The default cipher provider. Supports AES via the JCE. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index 5fb9c4777b3..49cc61f2318 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -27,7 +27,6 @@ import java.security.spec.InvalidKeySpecException; import java.util.Arrays; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; - import javax.crypto.SecretKeyFactory; import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; @@ -38,10 +37,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * A facade for encryption algorithms and related support. 
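(For context on the crypto hunks above and below: DefaultCipherProvider supplies AES through the plain JCE. A minimal standalone sketch of that underlying JCE path, assuming AES/CTR/NoPadding as the transformation; the class and method names here are illustrative, not HBase's Cipher/Encryptor API.)

import java.security.SecureRandom;
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

public class JceAesSketch {
  // Encrypt one buffer with AES/CTR via the JCE. AES keys are 16/24/32 bytes;
  // the CTR counter block (IV) is 16 bytes. CTR is length-preserving.
  public static byte[] encrypt(byte[] key, byte[] iv, byte[] plaintext) throws Exception {
    Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
    cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "AES"), new IvParameterSpec(iv));
    return cipher.doFinal(plaintext);
  }

  public static void main(String[] args) throws Exception {
    byte[] key = new byte[16];
    byte[] iv = new byte[16];
    SecureRandom rng = new SecureRandom();
    rng.nextBytes(key);
    rng.nextBytes(iv);
    System.out.println(encrypt(key, iv, "hello".getBytes("UTF-8")).length); // prints 5
  }
}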
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java index bc57debad9e..6c73bb4970b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java @@ -23,18 +23,17 @@ import java.io.OutputStream; import java.security.GeneralSecurityException; import java.security.Key; import java.security.SecureRandom; - import javax.crypto.spec.SecretKeySpec; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.CipherProvider; import org.apache.hadoop.hbase.io.crypto.Context; import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java index 79d2c9b26b6..5ad3e80a000 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java @@ -21,12 +21,11 @@ import java.io.InputStream; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.Key; - import javax.crypto.spec.IvParameterSpec; +import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java index f3f993cc6d1..fe79c8fc7a5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java @@ -22,12 +22,11 @@ import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.Key; import java.security.SecureRandom; - import javax.crypto.spec.IvParameterSpec; +import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java index e79d631ace1..412ea7947d4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java @@ -24,20 +24,19 @@ import 
java.security.GeneralSecurityException; import java.security.Key; import java.security.SecureRandom; import java.util.Properties; - import javax.crypto.spec.SecretKeySpec; import org.apache.commons.crypto.cipher.CryptoCipherFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.CipherProvider; import org.apache.hadoop.hbase.io.crypto.Context; import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java index 82e68f31c46..bb2290f657a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java @@ -17,17 +17,18 @@ */ package org.apache.hadoop.hbase.io.crypto.aes; -import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; -import org.apache.commons.crypto.stream.CryptoInputStream; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.io.crypto.Decryptor; - -import javax.crypto.spec.IvParameterSpec; import java.io.IOException; import java.io.InputStream; import java.security.Key; import java.util.Properties; +import javax.crypto.spec.IvParameterSpec; + +import org.apache.commons.crypto.stream.CryptoInputStream; +import org.apache.hadoop.hbase.io.crypto.Decryptor; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java index dc4a6ad4397..1b2f23f63bf 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.hbase.io.crypto.aes; -import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; -import org.apache.commons.crypto.stream.CryptoOutputStream; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.io.crypto.Encryptor; - -import javax.crypto.spec.IvParameterSpec; import java.io.IOException; import java.io.OutputStream; import java.security.Key; import java.security.SecureRandom; import java.util.Properties; +import javax.crypto.spec.IvParameterSpec; + +import org.apache.commons.crypto.stream.CryptoOutputStream; +import org.apache.hadoop.hbase.io.crypto.Encryptor; +import 
org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java index 0bc23f79224..f0f41b92309 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java @@ -18,11 +18,12 @@ package org.apache.hadoop.hbase.io.crypto.aes; -import org.apache.commons.crypto.cipher.CryptoCipher; -import org.apache.commons.crypto.utils.Utils; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; - +import java.io.IOException; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Properties; import javax.crypto.Cipher; import javax.crypto.Mac; import javax.crypto.SecretKey; @@ -30,12 +31,11 @@ import javax.crypto.ShortBufferException; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import javax.security.sasl.SaslException; -import java.io.IOException; -import java.security.InvalidAlgorithmParameterException; -import java.security.InvalidKeyException; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import java.util.Properties; + +import org.apache.commons.crypto.cipher.CryptoCipher; +import org.apache.commons.crypto.utils.Utils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; /** * AES encryption and decryption. 
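(The CommonsCryptoAES classes above wrap Apache Commons Crypto rather than the JCE. A sketch of that dependency, assuming the commons-crypto 1.x API that hbase-common builds against; error handling and the decrypt side are elided.)

import java.util.Properties;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.crypto.cipher.CryptoCipher;
import org.apache.commons.crypto.utils.Utils;

public class CommonsCryptoSketch {
  // Obtain a (possibly JNI-accelerated) AES/CTR cipher from Commons Crypto and
  // initialize it for encryption, roughly what the HBase wrappers do internally.
  public static CryptoCipher newEncryptCipher(byte[] key, byte[] iv) throws Exception {
    Properties props = new Properties(); // empty: accept Commons Crypto defaults
    CryptoCipher cipher = Utils.getCipherInstance("AES/CTR/NoPadding", props);
    cipher.init(javax.crypto.Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "AES"),
        new IvParameterSpec(iv));
    return cipher;
  }
}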
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java index d3c622a32ac..ab95717e3e1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java @@ -23,9 +23,9 @@ import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public abstract class AbstractDataBlockEncoder implements DataBlockEncoder { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 67a337d32eb..9bcda017e5c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -21,6 +21,7 @@ import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; + import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java index e3513fad34f..3a5806efdbf 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.encoding; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.KeyValue; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Stores the state of data block encoder at the beginning of new key. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java index 15473108000..8bc797474df 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java @@ -23,10 +23,10 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * Just copy data, do not do any kind of compression. 
Use for comparison and diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java index 7b4036c16ed..e6f339da3b7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java @@ -23,9 +23,9 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.nio.ByteBuff; +import org.apache.yetus.audience.InterfaceAudience; /** * Encoding of KeyValue. It aims to be fast and efficient using assumptions: diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java index f3b5b256a5e..335488b0756 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.IOException; import java.io.OutputStream; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * Provide access to all data block encoding algorithms. All of the algorithms @@ -172,16 +172,13 @@ public enum DataBlockEncoding { return algorithm; } - protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){ - try { - return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance(); - } catch (InstantiationException e) { - throw new RuntimeException(e); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } catch (ClassNotFoundException e) { - throw new IllegalArgumentException(e); - } + protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName) { + try { + return (DataBlockEncoder) Class.forName(fullyQualifiedClassName).getDeclaredConstructor() + .newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index 6762bb8f3b9..01f0a9de505 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -23,14 +23,14 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; +import org.apache.yetus.audience.InterfaceAudience; /** * Compress using: @@ -208,7 +208,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { private int compressSingleKeyValue(DataOutputStream out, Cell cell, 
Cell prevCell) throws IOException { - byte flag = 0; + int flag = 0; // Do not use more bits that can fit into a byte int kLength = KeyValueUtil.keyLength(cell); int vLength = cell.getValueLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index b9f7d771b0c..d7e8a9b6565 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -29,13 +29,13 @@ import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.Compressor; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -257,7 +257,7 @@ public class EncodedDataBlock { } BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream(); baos.writeTo(stream); - this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf); + this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.ourBytes); } catch (IOException e) { throw new RuntimeException(String.format( "Bug in encoding part of algorithm %s. " + @@ -268,11 +268,11 @@ public class EncodedDataBlock { } private static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream { - private byte[] buf; + private byte[] ourBytes; @Override - public void write(byte[] b, int off, int len) { - this.buf = b; + public synchronized void write(byte[] b, int off, int len) { + this.ourBytes = b; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index a3377769176..baa1856c005 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -24,14 +24,14 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; +import org.apache.yetus.audience.InterfaceAudience; /** * Encoder similar to {@link DiffKeyDeltaEncoder} but supposedly faster. 
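(Why both delta encoders widen byte flag to int flag, in the DiffKeyDeltaEncoder hunk above and the FastDiffDeltaEncoder hunk that follows: setting bits with |= produces an int, so a byte-typed accumulator forces an implicit narrowing on every assignment, which error-prone rejects. Accumulating in an int and narrowing once at the write is equivalent as long as only the low eight bits are used. A compressed sketch of the pattern; the constant names merely echo the encoder's flag fields.)

import java.io.ByteArrayOutputStream;

public class FlagByteSketch {
  static final int FLAG_SAME_KEY_LENGTH = 1;
  static final int FLAG_SAME_VALUE_LENGTH = 1 << 1;

  public static void main(String[] args) {
    int flag = 0; // do not use more bits than can fit into a byte
    flag |= FLAG_SAME_KEY_LENGTH;
    flag |= FLAG_SAME_VALUE_LENGTH;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write((byte) flag); // the single, explicit narrowing at the boundary
    System.out.println(out.toByteArray()[0]); // prints 3
  }
}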
@@ -253,7 +253,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell) throws IOException { - byte flag = 0; + int flag = 0; // Do not use more bits than will fit into a byte int kLength = KeyValueUtil.keyLength(cell); int vLength = cell.getValueLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java index d6972d3e92b..7f29302ed87 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.IOException; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.nio.ByteBuff; +import org.apache.yetus.audience.InterfaceAudience; /** * A decoding context that is created by a reader's encoder, and is shared diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java index 107f3198f71..d5bf58cb2a3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteBuffInputStream; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.compress.Compression; @@ -31,6 +30,7 @@ import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * A default implementation of {@link HFileBlockDecodingContext}. 
It assumes the diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java index d5168d2be95..2193037da16 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.io.InputStream; import java.security.SecureRandom; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.compress.Compression; @@ -36,6 +35,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java index e3f261a980a..96022294835 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.IOException; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * An encoding context that is created by a writer's encoder, and is shared diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java index 29b855a6fcb..e3092fe8aa7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java @@ -22,11 +22,11 @@ import java.io.DataOutputStream; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.io.WritableUtils; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class NoneEncoder { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index a488e489934..63da7e7c4d5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -24,13 +24,13 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.PrivateCellUtil; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * Compress key by storing size of common prefix with previous KeyValue diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java index 530e673e9a8..32933892655 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java @@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Store cells following every row's start offset, so we can binary search to a row's cells. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java index fded0f6f6de..92a3a4f089a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java @@ -16,10 +16,9 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparatorImpl; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RowIndexEncoderV1 { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java index 0ebcae08eba..02ed721bff6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java @@ -20,21 +20,21 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; +import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ByteBufferKeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.SizeCachedKeyValue; import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue; -import 
org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.encoding.AbstractDataBlockEncoder.AbstractEncodedSeeker; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RowIndexSeekerV1 extends AbstractEncodedSeeker { @@ -365,7 +365,7 @@ public class RowIndexSeekerV1 extends AbstractEncodedSeeker { public Cell toCell() { Cell ret; int cellBufSize = getCellBufSize(); - long seqId = 0l; + long seqId = 0L; if (includesMvcc()) { seqId = memstoreTS; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java index 305f5cde9a0..f5e28b75285 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java @@ -18,15 +18,16 @@ package org.apache.hadoop.hbase.io.hadoopbackport; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.PositionedReadable; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.yetus.audience.InterfaceAudience; + +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * The ThrottleInputStream provides bandwidth throttling on a specified diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java index d8e98d94023..4753813d30e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java @@ -25,9 +25,9 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * Various types of HFile blocks. Ordinal values of these enum constants must not be relied upon. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java index 9fc5b6adb89..b5ccda21a44 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java @@ -16,8 +16,8 @@ * limitations under the License. 
*/ package org.apache.hadoop.hbase.io.hfile; + import org.apache.hadoop.hbase.HConstants; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; +import org.apache.yetus.audience.InterfaceAudience; /** * This carries the information on some of the meta data about the HFile. This diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java index 0f94f343c29..24e23e81a2b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.io.hfile; import org.apache.hadoop.hbase.HConstants; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.util.ChecksumType; +import org.apache.yetus.audience.InterfaceAudience; /** * A builder that helps in building up the HFileContext diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java index 243e8a4a045..a67dd57f074 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Dictionary interface diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java index 0b682b0c8f9..b0dcc864638 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.io.util; import java.nio.ByteBuffer; import java.util.HashMap; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java index a420891a2ab..02f6a6d4c2f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java @@ -23,9 +23,9 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; 
import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java index ef502ca4159..17b2a7c2186 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java @@ -21,11 +21,11 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.ReadableByteChannel; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; import org.apache.hadoop.io.WritableUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * An abstract class that abstracts out as to how the byte buffers are used, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java index 12051c81a09..5274e953d6b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java @@ -24,10 +24,10 @@ import java.nio.ByteBuffer; import java.nio.InvalidMarkException; import java.nio.channels.ReadableByteChannel; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -177,6 +177,7 @@ public class MultiByteBuff extends ByteBuff { * @param index * @return the int value at the given index */ + @Override public int getInt(int index) { // Mostly the index specified will land within this current item. Short circuit for that int itemIndex; @@ -207,6 +208,7 @@ public class MultiByteBuff extends ByteBuff { * @param index * @return the short value at the given index */ + @Override public short getShort(int index) { // Mostly the index specified will land within this current item. 
Short circuit for that int itemIndex; @@ -228,9 +230,9 @@ public class MultiByteBuff extends ByteBuff { ByteBuffer nextItem = items[itemIndex + 1]; // Get available one byte from this item and remaining one from next short n = 0; - n ^= ByteBufferUtils.toByte(item, offsetInItem) & 0xFF; - n <<= 8; - n ^= ByteBufferUtils.toByte(nextItem, 0) & 0xFF; + n = (short) (n ^ (ByteBufferUtils.toByte(item, offsetInItem) & 0xFF)); + n = (short) (n << 8); + n = (short) (n ^ (ByteBufferUtils.toByte(nextItem, 0) & 0xFF)); return n; } @@ -287,12 +289,12 @@ public class MultiByteBuff extends ByteBuff { // Get available bytes from this item and remaining from next short l = 0; for (int i = offsetInItem; i < item.capacity(); i++) { - l <<= 8; - l ^= ByteBufferUtils.toByte(item, i) & 0xFF; + l = (short) (l << 8); + l = (short) (l ^ (ByteBufferUtils.toByte(item, i) & 0xFF)); } for (int i = 0; i < Bytes.SIZEOF_SHORT - remainingLen; i++) { - l <<= 8; - l ^= ByteBufferUtils.toByte(nextItem, i) & 0xFF; + l = (short) (l << 8); + l = (short) (l ^ (ByteBufferUtils.toByte(nextItem, i) & 0xFF)); } return l; } @@ -327,6 +329,7 @@ public class MultiByteBuff extends ByteBuff { * @param index * @return the long value at the given index */ + @Override public long getLong(int index) { // Mostly the index specified will land within this current item. Short circuit for that int itemIndex; @@ -520,9 +523,9 @@ public class MultiByteBuff extends ByteBuff { return this.curItem.getShort(); } short n = 0; - n ^= get() & 0xFF; - n <<= 8; - n ^= get() & 0xFF; + n = (short) (n ^ (get() & 0xFF)); + n = (short) (n << 8); + n = (short) (n ^ (get() & 0xFF)); return n; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java index 674507e50db..ad95b3e3603 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java @@ -17,20 +17,19 @@ */ package org.apache.hadoop.hbase.nio; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; - import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.ReadableByteChannel; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ObjectIntPair; import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAvailChecker; - +import org.apache.yetus.audience.InterfaceAudience; import sun.nio.ch.DirectBuffer; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; + /** * An implementation of ByteBuff where a single BB backs the BBI. This just acts * as a wrapper over a normal BB - offheap or onheap diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java index 5408083e42f..f87ec714683 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java @@ -24,8 +24,8 @@ import java.util.SortedSet; import java.util.TreeSet; import org.apache.hadoop.hbase.TableName; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.net.Address; +import org.apache.yetus.audience.InterfaceAudience; /** * Stores the group information of region server groups.
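(The MultiByteBuff hunks earlier in this patch replace compound assignments on a short for a related reason: n <<= 8 and n ^= x silently promote to int and narrow back on every step, which error-prone flags as a narrowing compound assignment. The rewritten statements compute the same bit pattern with the narrowing made explicit. A self-contained illustration, in a hypothetical class, of the two-byte assembly those hunks perform.)

public class ShortAssembly {
  static short toShort(byte hi, byte lo) {
    short n = 0;
    n = (short) (n ^ (hi & 0xFF)); // fold in the high byte, masked to 0..255
    n = (short) (n << 8);          // shift it up; the (short) cast is now visible
    n = (short) (n ^ (lo & 0xFF)); // fold in the low byte
    return n;
  }

  public static void main(String[] args) {
    System.out.println(toShort((byte) 0x12, (byte) 0x34)); // prints 4660 (0x1234)
  }
}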
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java index db081004e6c..dceafbd087c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java @@ -19,16 +19,16 @@ package org.apache.hadoop.hbase.security; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.yetus.audience.InterfaceAudience; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** * Keeps lists of superusers and super groups loaded from HBase configuration, * checks if certain user is regarded as superuser. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java index 1b6f57c7d29..2c743b64013 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java @@ -28,15 +28,17 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; -import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; + import org.apache.hadoop.conf.Configuration; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.yetus.audience.InterfaceAudience; + +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; /** * Wrapper to abstract out usage of user and group information in HBase. 
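(User above is a thin wrapper over Hadoop's UserGroupInformation, so the rest of HBase never touches UGI directly. The raw calls being wrapped look roughly like this; the sketch assumes an insecure deployment, where getCurrentUser() resolves from the OS login rather than a Kerberos ticket.)

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSketch {
  public static void main(String[] args) throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    System.out.println("short name: " + ugi.getShortUserName());
    // getGroupNames() is the lookup that UserProvider's cache (next hunk) front-ends.
    System.out.println("groups: " + String.join(",", ugi.getGroupNames()));
  }
}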
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java index a231cfac09f..290eb68efe1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java @@ -24,6 +24,14 @@ import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.hbase.BaseConfigurable; +import org.apache.hadoop.security.Groups; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.yetus.audience.InterfaceAudience; + import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; @@ -31,13 +39,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Listenab import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.hbase.BaseConfigurable; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.security.Groups; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.ReflectionUtils; /** * Provide an instance of a user. Allows custom {@link User} creation. @@ -98,11 +99,11 @@ public class UserProvider extends BaseConfigurable { } // Provide the reload function that uses the executor thread. 
- public ListenableFuture<String[]> reload(final String k, - String[] oldValue) throws Exception { + @Override + public ListenableFuture<String[]> reload(final String k, String[] oldValue) + throws Exception { return executor.submit(new Callable<String[]>() { - @Override public String[] call() throws Exception { return getGroupStrings(k); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java index 93a5fff8db9..89339c536f1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java @@ -42,7 +42,7 @@ public class SpanReceiverHost { private static enum SingletonHolder { INSTANCE; - transient Object lock = new Object(); + final transient Object lock = new Object(); transient SpanReceiverHost host = null; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java index 55a804b5104..4184d34994f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; - import java.util.AbstractMap; import java.util.Collection; import java.util.Comparator; @@ -32,6 +29,9 @@ import java.util.Set; import java.util.SortedSet; import java.util.concurrent.ConcurrentNavigableMap; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + /** * A Map that keeps a sorted array in order to provide the concurrent map interface. * Keeping a sorted array means that it's much more cache line friendly, making reads faster @@ -706,7 +706,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V> } } - private final class ArrayKeyIterator implements Iterator<K> { + private static final class ArrayKeyIterator<K, V> implements Iterator<K> { int index; private final ArrayHolder<K, V> holder; @@ -732,7 +732,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V> } } - private final class ArrayValueIterator implements Iterator<V> { + private static final class ArrayValueIterator<K, V> implements Iterator<V> { int index; private final ArrayHolder<K, V> holder; @@ -758,7 +758,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V> } } - private final class ArrayEntryIterator implements Iterator<Entry<K, V>> { + private static final class ArrayEntryIterator<K, V> implements Iterator<Entry<K, V>> { int index; private final ArrayHolder<K, V> holder; @@ -879,8 +879,7 @@ public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V> * Binary search for a given key * @param needle The key to look for in all of the entries * @return Same return value as Arrays.binarySearch. - * Positive numbers mean the index. - * Otherwise (-1 * insertion point) - 1 + * Positive numbers mean the index.
Otherwise (-1 * insertion point) - 1 */ int find(K needle) { int begin = startIndex; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java index 6ffec704072..eb374986304 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** *

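(DataType, whose imports were just reordered above, is the contract that all of the following type classes implement. Typical driving code, as a sketch: OrderedInt32 and SimplePositionedMutableByteRange are existing hbase-common classes, and the 5-byte buffer is assumed to match OrderedInt32's one-byte-header-plus-four-byte-value encoding.)

import org.apache.hadoop.hbase.types.OrderedInt32;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class DataTypeSketch {
  public static void main(String[] args) {
    PositionedByteRange buf = new SimplePositionedMutableByteRange(5);
    OrderedInt32.ASCENDING.encode(buf, 42); // advances the position past the encoding
    buf.setPosition(0);                     // rewind to read it back
    System.out.println(OrderedInt32.ASCENDING.decode(buf)); // prints 42
  }
}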
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java index 9aa00ae4ae9..2667041b35f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * Wraps an existing {@link DataType} implementation as a fixed-length @@ -47,25 +47,39 @@ public class FixedLengthWrapper<T> implements DataType<T> { /** * Retrieve the maximum length (in bytes) of encoded values. */ - public int getLength() { return length; } + public int getLength() { + return length; + } @Override - public boolean isOrderPreserving() { return base.isOrderPreserving(); } + public boolean isOrderPreserving() { + return base.isOrderPreserving(); + } @Override - public Order getOrder() { return base.getOrder(); } + public Order getOrder() { + return base.getOrder(); + } @Override - public boolean isNullable() { return base.isNullable(); } + public boolean isNullable() { + return base.isNullable(); + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(T val) { return length; } + public int encodedLength(T val) { + return length; + } @Override - public Class<T> encodedClass() { return base.encodedClass(); } + public Class<T> encodedClass() { + return base.encodedClass(); + } @Override public int skip(PositionedByteRange src) { @@ -99,7 +113,9 @@ public class FixedLengthWrapper<T> implements DataType<T> { + ") exceeds max length (" + length + ")."); } // TODO: is the zero-padding appropriate? - for (; written < length; written++) { dst.put((byte) 0x00); } + for (; written < length; written++) { + dst.put((byte) 0x00); + } return written; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java index c4e56876235..cdb1173b36a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code byte[]} of variable-length.
Build on @@ -32,10 +32,14 @@ public class OrderedBlob extends OrderedBytesBase<byte[]> { public static final OrderedBlob ASCENDING = new OrderedBlob(Order.ASCENDING); public static final OrderedBlob DESCENDING = new OrderedBlob(Order.DESCENDING); - protected OrderedBlob(Order order) { super(order); } + protected OrderedBlob(Order order) { + super(order); + } @Override - public boolean isSkippable() { return false; } + public boolean isSkippable() { + return false; + } @Override public int encodedLength(byte[] val) { @@ -45,7 +49,9 @@ public class OrderedBlob extends OrderedBytesBase<byte[]> { } @Override - public Class<byte[]> encodedClass() { return byte[].class; } + public Class<byte[]> encodedClass() { + return byte[].class; + } @Override public byte[] decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java index 7c31e161e8b..44621bf4b30 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An alternative to {@link OrderedBlob} for use by {@link Struct} fields that @@ -33,7 +33,9 @@ public class OrderedBlobVar extends OrderedBytesBase<byte[]> { public static final OrderedBlobVar ASCENDING = new OrderedBlobVar(Order.ASCENDING); public static final OrderedBlobVar DESCENDING = new OrderedBlobVar(Order.DESCENDING); - protected OrderedBlobVar(Order order) { super(order); } + protected OrderedBlobVar(Order order) { + super(order); + } @Override public int encodedLength(byte[] val) { @@ -41,7 +43,9 @@ public class OrderedBlobVar extends OrderedBytesBase<byte[]> { } @Override - public Class<byte[]> encodedClass() { return byte[].class; } + public Class<byte[]> encodedClass() { + return byte[].class; + } @Override public byte[] decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java index 9a00b2f1f8e..a4b63de7778 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * Base class for data types backed by the {@link OrderedBytes} encoding @@ -31,21 +31,31 @@ public abstract class OrderedBytesBase<T> implements DataType<T> { protected final Order order; - protected OrderedBytesBase(Order order) { this.order = order; } + protected OrderedBytesBase(Order order) { + this.order = order; + } @Override - public boolean isOrderPreserving() { return true; } + public boolean isOrderPreserving() { + return true; + } @Override - public Order getOrder() { return order; } + public Order getOrder() { + return order; + } // almost all OrderedBytes implementations are nullable.
@Override - public boolean isNullable() { return true; } + public boolean isNullable() { + return true; + } // almost all OrderedBytes implementations are skippable. @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java index a43c1309914..966b60d2640 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code float} of 32-bits using a fixed-length encoding. Based on @@ -33,16 +32,24 @@ public class OrderedFloat32 extends OrderedBytesBase { public static final OrderedFloat32 ASCENDING = new OrderedFloat32(Order.ASCENDING); public static final OrderedFloat32 DESCENDING = new OrderedFloat32(Order.DESCENDING); - protected OrderedFloat32(Order order) { super(order); } + protected OrderedFloat32(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Float val) { return 5; } + public int encodedLength(Float val) { + return 5; + } @Override - public Class encodedClass() { return Float.class; } + public Class encodedClass() { + return Float.class; + } @Override public Float decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedFloat32 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Float val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeFloat32(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java index 573e15d4fdb..2c0eb717a29 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code double} of 64-bits using a fixed-length encoding. 
Built on @@ -33,16 +32,24 @@ public class OrderedFloat64 extends OrderedBytesBase { public static final OrderedFloat64 ASCENDING = new OrderedFloat64(Order.ASCENDING); public static final OrderedFloat64 DESCENDING = new OrderedFloat64(Order.DESCENDING); - protected OrderedFloat64(Order order) { super(order); } + protected OrderedFloat64(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Double val) { return 9; } + public int encodedLength(Double val) { + return 9; + } @Override - public Class encodedClass() { return Double.class; } + public Class encodedClass() { + return Double.class; + } @Override public Double decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedFloat64 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Double val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeFloat64(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java index 6556c37619a..1840fbc5e7f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code short} of 16-bits using a fixed-length encoding. 
Built on @@ -33,16 +32,24 @@ public class OrderedInt16 extends OrderedBytesBase { public static final OrderedInt16 ASCENDING = new OrderedInt16(Order.ASCENDING); public static final OrderedInt16 DESCENDING = new OrderedInt16(Order.DESCENDING); - protected OrderedInt16(Order order) { super(order); } + protected OrderedInt16(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Short val) { return 3; } + public int encodedLength(Short val) { + return 3; + } @Override - public Class encodedClass() { return Short.class; } + public Class encodedClass() { + return Short.class; + } @Override public Short decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedInt16 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Short val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeInt16(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java index ce4d0fb74af..e45aa5b3ec5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code int} of 32-bits using a fixed-length encoding. 
Built on @@ -33,16 +32,24 @@ public class OrderedInt32 extends OrderedBytesBase { public static final OrderedInt32 ASCENDING = new OrderedInt32(Order.ASCENDING); public static final OrderedInt32 DESCENDING = new OrderedInt32(Order.DESCENDING); - protected OrderedInt32(Order order) { super(order); } + protected OrderedInt32(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Integer val) { return 5; } + public int encodedLength(Integer val) { + return 5; + } @Override - public Class encodedClass() { return Integer.class; } + public Class encodedClass() { + return Integer.class; + } @Override public Integer decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedInt32 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Integer val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeInt32(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java index c0dd565ba52..3a409d38613 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code long} of 64-bits using a fixed-length encoding. 
Built on @@ -33,16 +32,24 @@ public class OrderedInt64 extends OrderedBytesBase { public static final OrderedInt64 ASCENDING = new OrderedInt64(Order.ASCENDING); public static final OrderedInt64 DESCENDING = new OrderedInt64(Order.DESCENDING); - protected OrderedInt64(Order order) { super(order); } + protected OrderedInt64(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Long val) { return 9; } + public int encodedLength(Long val) { + return 9; + } @Override - public Class encodedClass() { return Long.class; } + public Class encodedClass() { + return Long.class; + } @Override public Long decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedInt64 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Long val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeInt64(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java index 82d071607c8..4a7b47b241e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; - +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code byte} of 8-bits using a fixed-length encoding. 
Built on @@ -33,16 +32,24 @@ public class OrderedInt8 extends OrderedBytesBase { public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING); public static final OrderedInt8 DESCENDING = new OrderedInt8(Order.DESCENDING); - protected OrderedInt8(Order order) { super(order); } + protected OrderedInt8(Order order) { + super(order); + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public int encodedLength(Byte val) { return 2; } + public int encodedLength(Byte val) { + return 2; + } @Override - public Class encodedClass() { return Byte.class; } + public Class encodedClass() { + return Byte.class; + } @Override public Byte decode(PositionedByteRange src) { @@ -51,7 +58,9 @@ public class OrderedInt8 extends OrderedBytesBase { @Override public int encode(PositionedByteRange dst, Byte val) { - if (null == val) throw new IllegalArgumentException("Null values not supported."); + if (null == val) { + throw new IllegalArgumentException("Null values not supported."); + } return OrderedBytes.encodeInt8(dst, val, order); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java index aec3e631b2e..3cdb7cc7706 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java @@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.types; import java.math.BigDecimal; import java.math.BigInteger; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@link Number} of arbitrary precision and variable-length encoding. The @@ -40,7 +40,9 @@ public class OrderedNumeric extends OrderedBytesBase { public static final OrderedNumeric ASCENDING = new OrderedNumeric(Order.ASCENDING); public static final OrderedNumeric DESCENDING = new OrderedNumeric(Order.DESCENDING); - protected OrderedNumeric(Order order) { super(order); } + protected OrderedNumeric(Order order) { + super(order); + } @Override public int encodedLength(Number val) { @@ -50,7 +52,9 @@ public class OrderedNumeric extends OrderedBytesBase { } @Override - public Class encodedClass() { return Number.class; } + public Class encodedClass() { + return Number.class; + } @Override public Number decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java index 44657cd1c62..e687f398fe7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * A {@code String} of variable-length. 
Built on @@ -32,7 +32,9 @@ public class OrderedString extends OrderedBytesBase { public static final OrderedString ASCENDING = new OrderedString(Order.ASCENDING); public static final OrderedString DESCENDING = new OrderedString(Order.DESCENDING); - protected OrderedString(Order order) { super(order); } + protected OrderedString(Order order) { + super(order); + } @Override public int encodedLength(String val) { @@ -41,7 +43,9 @@ public class OrderedString extends OrderedBytesBase { } @Override - public Class encodedClass() { return String.class; } + public Class encodedClass() { + return String.class; + } @Override public String decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java index 63489cdea81..eb67e71278b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.util.Order; -import org.apache.hadoop.hbase.util.PositionedByteRange; - import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * A base-class for {@link DataType} implementations backed by protobuf. See diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java index 3882da87300..b29a49d5f5a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -32,22 +32,34 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; public class RawByte implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Byte val) { return Bytes.SIZEOF_BYTE; } + public int encodedLength(Byte val) { + return Bytes.SIZEOF_BYTE; + } @Override - public Class encodedClass() { return Byte.class; } + public Class encodedClass() { + return Byte.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java index d8c83c20d46..5f308ed40e1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java @@ 
-17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with variable-length values @@ -41,20 +41,33 @@ public class RawBytes implements DataType { protected final Order order; - protected RawBytes() { this.order = Order.ASCENDING; } - protected RawBytes(Order order) { this.order = order; } + protected RawBytes() { + this.order = Order.ASCENDING; + } + + protected RawBytes(Order order) { + this.order = order; + } @Override - public boolean isOrderPreserving() { return true; } + public boolean isOrderPreserving() { + return true; + } @Override - public Order getOrder() { return order; } + public Order getOrder() { + return order; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return false; } + public boolean isSkippable() { + return false; + } @Override public int skip(PositionedByteRange src) { @@ -64,10 +77,14 @@ public class RawBytes implements DataType { } @Override - public int encodedLength(byte[] val) { return val.length; } + public int encodedLength(byte[] val) { + return val.length; + } @Override - public Class encodedClass() { return byte[].class; } + public Class encodedClass() { + return byte[].class; + } @Override public byte[] decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java index 121e49d0835..3de9ee5d58c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} that encodes fixed-length values encoded using diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java index ef327d3b626..49f9fff296f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} that encodes variable-length values encoded using diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java index 8057b4a883c..76d3913d9b8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import 
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -33,22 +33,34 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; public class RawDouble implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Double val) { return Bytes.SIZEOF_DOUBLE; } + public int encodedLength(Double val) { + return Bytes.SIZEOF_DOUBLE; + } @Override - public Class encodedClass() { return Double.class; } + public Class encodedClass() { + return Double.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java index 9bf6400b6d4..4f96535110a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -33,22 +33,34 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; public class RawFloat implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Float val) { return Bytes.SIZEOF_FLOAT; } + public int encodedLength(Float val) { + return Bytes.SIZEOF_FLOAT; + } @Override - public Class encodedClass() { return Float.class; } + public Class encodedClass() { + return Float.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java index 21bf212617c..9bdc9cc6285 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -33,22 +33,34 @@ import 
org.apache.hadoop.hbase.util.PositionedByteRange; public class RawInteger implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Integer val) { return Bytes.SIZEOF_INT; } + public int encodedLength(Integer val) { + return Bytes.SIZEOF_INT; + } @Override - public Class encodedClass() { return Integer.class; } + public Class encodedClass() { + return Integer.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java index 3a235f4b906..8a9d4ee7335 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -33,22 +33,34 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; public class RawLong implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Long val) { return Bytes.SIZEOF_LONG; } + public int encodedLength(Long val) { + return Bytes.SIZEOF_LONG; + } @Override - public Class encodedClass() { return Long.class; } + public Class encodedClass() { + return Long.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java index a14fdfa2694..6387f3b43c4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -33,22 +33,34 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; public class RawShort implements DataType { @Override - public boolean isOrderPreserving() { return false; } + public boolean isOrderPreserving() { + return false; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } 
@Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override - public int encodedLength(Short val) { return Bytes.SIZEOF_SHORT; } + public int encodedLength(Short val) { + return Bytes.SIZEOF_SHORT; + } @Override - public Class encodedClass() { return Short.class; } + public Class encodedClass() { + return Short.class; + } @Override public int skip(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java index 33361b7d8ea..69034cec916 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} for interacting with values encoded using @@ -38,20 +38,33 @@ public class RawString implements DataType { protected final Order order; - protected RawString() { this.order = Order.ASCENDING; } - protected RawString(Order order) { this.order = order; } + protected RawString() { + this.order = Order.ASCENDING; + } + + protected RawString(Order order) { + this.order = order; + } @Override - public boolean isOrderPreserving() { return true; } + public boolean isOrderPreserving() { + return true; + } @Override - public Order getOrder() { return order; } + public Order getOrder() { + return order; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return false; } + public boolean isSkippable() { + return false; + } @Override public int skip(PositionedByteRange src) { @@ -61,10 +74,14 @@ public class RawString implements DataType { } @Override - public int encodedLength(String val) { return Bytes.toBytes(val).length; } + public int encodedLength(String val) { + return Bytes.toBytes(val).length; + } @Override - public Class encodedClass() { return String.class; } + public Class encodedClass() { + return String.class; + } @Override public String decode(PositionedByteRange src) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java index 431b25f271f..e8b99bdee06 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} that encodes fixed-length values encoded using diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java index 6a4a2e7893c..397f443465d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java +++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@code DataType} that encodes variable-length values encoded using diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java index a06c1758278..793c91094e4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.types; import java.util.Iterator; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** *

@@ -102,41 +102,56 @@ public class Struct implements DataType { boolean skippable = true; for (int i = 0; i < this.fields.length; i++) { DataType dt = this.fields[i]; - if (!dt.isOrderPreserving()) preservesOrder = false; + if (!dt.isOrderPreserving()) { + preservesOrder = false; + } if (i < this.fields.length - 2 && !dt.isSkippable()) { throw new IllegalArgumentException("Field in position " + i + " is not skippable. Non-right-most struct fields must be skippable."); } - if (!dt.isSkippable()) skippable = false; + if (!dt.isSkippable()) { + skippable = false; + } } this.isOrderPreserving = preservesOrder; this.isSkippable = skippable; } @Override - public boolean isOrderPreserving() { return isOrderPreserving; } + public boolean isOrderPreserving() { + return isOrderPreserving; + } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override - public boolean isNullable() { return false; } + public boolean isNullable() { + return false; + } @Override - public boolean isSkippable() { return isSkippable; } + public boolean isSkippable() { + return isSkippable; + } @SuppressWarnings("unchecked") @Override public int encodedLength(Object[] val) { assert fields.length >= val.length; int sum = 0; - for (int i = 0; i < val.length; i++) + for (int i = 0; i < val.length; i++) { sum += fields[i].encodedLength(val[i]); + } return sum; } @Override - public Class encodedClass() { return Object[].class; } + public Class encodedClass() { + return Object[].class; + } /** * Retrieve an {@link Iterator} over the values encoded in {@code src}. @@ -150,8 +165,9 @@ public class Struct implements DataType { public int skip(PositionedByteRange src) { StructIterator it = iterator(src); int skipped = 0; - while (it.hasNext()) + while (it.hasNext()) { skipped += it.skip(); + } return skipped; } @@ -160,8 +176,9 @@ public class Struct implements DataType { int i = 0; Object[] ret = new Object[fields.length]; Iterator it = iterator(src); - while (it.hasNext()) + while (it.hasNext()) { ret[i++] = it.next(); + } return ret; } @@ -171,20 +188,25 @@ public class Struct implements DataType { public Object decode(PositionedByteRange src, int index) { assert index >= 0; StructIterator it = iterator(src.shallowCopy()); - for (; index > 0; index--) + for (; index > 0; index--) { it.skip(); + } return it.next(); } @SuppressWarnings("unchecked") @Override public int encode(PositionedByteRange dst, Object[] val) { - if (val.length == 0) return 0; + if (val.length == 0) { + return 0; + } assert fields.length >= val.length; int end, written = 0; // find the last occurrence of a non-null or null and non-nullable value for (end = val.length - 1; end > -1; end--) { - if (null != val[end] || (null == val[end] && !fields[end].isNullable())) break; + if (null != val[end] || (null == val[end] && !fields[end].isNullable())) { + break; + } } for (int i = 0; i <= end; i++) { written += fields[i].encode(dst, val[i]); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java index d49b7847bf4..98511afdfaf 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java @@ -38,15 +38,23 @@ public class StructBuilder { /** * Append {@code field} to the sequence of accumulated fields. 
*/ - public StructBuilder add(DataType field) { fields.add(field); return this; } + public StructBuilder add(DataType field) { + fields.add(field); + return this; + } /** * Retrieve the {@link Struct} represented by {@code this}. */ - public Struct toStruct() { return new Struct(fields.toArray(new DataType[fields.size()])); } + public Struct toStruct() { + return new Struct(fields.toArray(new DataType[fields.size()])); + } /** * Reset the sequence of accumulated fields. */ - public StructBuilder reset() { fields.clear(); return this; } + public StructBuilder reset() { + fields.clear(); + return this; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java index 3227e3ba8b3..e6226a54d4d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.types; import java.util.Iterator; import java.util.NoSuchElementException; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * An {@link Iterator} over encoded {@code Struct} members. @@ -73,13 +73,19 @@ public class StructIterator implements Iterator { } @Override - public void remove() { throw new UnsupportedOperationException(); } + public void remove() { + throw new UnsupportedOperationException(); + } @Override public Object next() { - if (!hasNext()) throw new NoSuchElementException(); + if (!hasNext()) { + throw new NoSuchElementException(); + } DataType t = types[idx++]; - if (src.getPosition() == src.getLength() && t.isNullable()) return null; + if (src.getPosition() == src.getLength() && t.isNullable()) { + return null; + } return t.decode(src); } @@ -88,9 +94,13 @@ public class StructIterator implements Iterator { * @return the number of bytes skipped. */ public int skip() { - if (!hasNext()) throw new NoSuchElementException(); + if (!hasNext()) { + throw new NoSuchElementException(); + } DataType t = types[idx++]; - if (src.getPosition() == src.getLength() && t.isNullable()) return 0; + if (src.getPosition() == src.getLength() && t.isNullable()) { + return 0; + } return t.skip(src); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java index 17ff6f4e4f6..dcd6b844008 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * Wraps an existing {@code DataType} implementation as a terminated @@ -40,8 +40,9 @@ public class TerminatedWrapper implements DataType { * @throws IllegalArgumentException when {@code term} is null or empty. 
*/ public TerminatedWrapper(DataType wrapped, byte[] term) { - if (null == term || term.length == 0) + if (null == term || term.length == 0) { throw new IllegalArgumentException("terminator must be non-null and non-empty."); + } this.wrapped = wrapped; wrapped.getOrder().apply(term); this.term = term; @@ -58,16 +59,24 @@ public class TerminatedWrapper implements DataType { } @Override - public boolean isOrderPreserving() { return wrapped.isOrderPreserving(); } + public boolean isOrderPreserving() { + return wrapped.isOrderPreserving(); + } @Override - public Order getOrder() { return wrapped.getOrder(); } + public Order getOrder() { + return wrapped.getOrder(); + } @Override - public boolean isNullable() { return wrapped.isNullable(); } + public boolean isNullable() { + return wrapped.isNullable(); + } @Override - public boolean isSkippable() { return true; } + public boolean isSkippable() { + return true; + } @Override public int encodedLength(T val) { @@ -75,7 +84,9 @@ public class TerminatedWrapper implements DataType { } @Override - public Class encodedClass() { return wrapped.encodedClass(); } + public Class encodedClass() { + return wrapped.encodedClass(); + } /** * Return the position at which {@code term} begins within {@code src}, @@ -86,12 +97,18 @@ public class TerminatedWrapper implements DataType { final int offset = src.getOffset(); int i; SKIP: for (i = src.getPosition(); i < src.getLength(); i++) { - if (a[offset + i] != term[0]) continue; + if (a[offset + i] != term[0]) { + continue; + } int j; for (j = 1; j < term.length && offset + j < src.getLength(); j++) { - if (a[offset + i + j] != term[j]) continue SKIP; + if (a[offset + i + j] != term[j]) { + continue SKIP; + } + } + if (j == term.length) { + return i; // success } - if (j == term.length) return i; // success } return -1; } @@ -112,7 +129,9 @@ public class TerminatedWrapper implements DataType { // find the terminator position final int start = src.getPosition(); int skipped = terminatorPosition(src); - if (-1 == skipped) throw new IllegalArgumentException("Terminator sequence not found."); + if (-1 == skipped) { + throw new IllegalArgumentException("Terminator sequence not found."); + } skipped += term.length; src.setPosition(skipped); return skipped - start; @@ -128,7 +147,9 @@ public class TerminatedWrapper implements DataType { } else { // find the terminator position int term = terminatorPosition(src); - if (-1 == term) throw new IllegalArgumentException("Terminator sequence not found."); + if (-1 == term) { + throw new IllegalArgumentException("Terminator sequence not found."); + } byte[] b = new byte[term - src.getPosition()]; src.get(b); // TODO: should we assert that b.position == b.length? 
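The TerminatedWrapper hunks above are brace-and-formatting cleanups only, but the class is subtle enough that a usage sketch may help when reviewing them: wrapping a non-skippable type such as RawString (whose decode otherwise consumes the remainder of the range) in a terminator makes it skippable, and therefore legal in a non-right-most Struct position. The sketch below is illustrative only, not part of the patch; it assumes nothing beyond the public API visible in these hunks (RawString.ASCENDING, TerminatedWrapper, SimplePositionedMutableByteRange), and the class name is invented.

import org.apache.hadoop.hbase.types.DataType;
import org.apache.hadoop.hbase.types.RawString;
import org.apache.hadoop.hbase.types.TerminatedWrapper;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class TerminatedWrapperSketch {
  public static void main(String[] args) {
    // RawString alone is not skippable: decode() consumes the rest of the
    // source range. Appending a terminator restores skippability.
    DataType<String> type =
        new TerminatedWrapper<>(RawString.ASCENDING, new byte[] { 0x00 });

    PositionedByteRange buf = new SimplePositionedMutableByteRange(32);
    type.encode(buf, "hello"); // five content bytes plus the 0x00 terminator

    buf.setPosition(0);
    System.out.println(type.skip(buf));   // 6: terminatorPosition() found 0x00

    buf.setPosition(0);
    System.out.println(type.decode(buf)); // "hello"
  }
}

encode() also rejects any value whose encoding contains the terminator sequence, which is why a 0x00 terminator pairs naturally with string data.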
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java index 8dd6b2c5e9c..5c9f26d5165 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * The {@code Union} family of {@link DataType}s encode one of a fixed @@ -48,7 +48,9 @@ public abstract class Union2 implements DataType { } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override public boolean isNullable() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java index 3b2a606ed37..f489ba257f5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * The {@code Union} family of {@link DataType}s encode one of a fixed @@ -48,7 +48,9 @@ public abstract class Union3 extends Union2 { } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override public boolean isNullable() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java index cadea44ed1d..c4c814bf908 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.types; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.yetus.audience.InterfaceAudience; /** * The {@code Union} family of {@link DataType}s encode one of a fixed @@ -47,7 +47,9 @@ public abstract class Union4 extends Union3 { } @Override - public Order getOrder() { return null; } + public Order getOrder() { + return null; + } @Override public boolean isNullable() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java index 85b1e4bc7a8..60de6cdde4d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java @@ -189,9 +189,9 @@ public abstract class AbstractByteRange implements ByteRange { public short getShort(int index) { int offset = this.offset + index; short n = 0; - n ^= bytes[offset] & 0xFF; - n <<= 8; - n ^= bytes[offset + 1] & 0xFF; + n = (short) (n ^ (bytes[offset] & 0xFF)); + n = (short) (n << 8); + n = (short) (n ^ (bytes[offset + 1] & 0xFF)); return n; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java index e301c1fffdf..21b174e71b3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java @@ -36,9 +36,9 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.yetus.audience.InterfaceAudience; /** * Common base class used for HBase command-line tools. Simplifies workflow and diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index 7bc40085a10..91df2d56acb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -35,6 +35,7 @@ import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -569,7 +570,7 @@ public class Base64 { return new String(baos.toByteArray(), PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(baos.toByteArray()); + return new String(baos.toByteArray(), StandardCharsets.UTF_8); } catch (IOException e) { LOG.error("error encoding object", e); @@ -694,7 +695,7 @@ public class Base64 { return new String(baos.toByteArray(), PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(baos.toByteArray()); + return new String(baos.toByteArray(), StandardCharsets.UTF_8); } catch (IOException e) { LOG.error("error encoding byte array", e); @@ -751,7 +752,7 @@ public class Base64 { return new String(outBuff, 0, e, PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(outBuff, 0, e); + return new String(outBuff, 0, e, StandardCharsets.UTF_8); } } // end encodeBytes @@ -926,7 +927,7 @@ public class Base64 { bytes = s.getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException uee) { - bytes = s.getBytes(); + bytes = s.getBytes(StandardCharsets.UTF_8); } // end catch // Decode diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java index 2715740864b..b2e5c9b751e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java @@ -28,15 +28,14 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.MultiByteBuff; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.util.StringUtils; - -import com.google.common.annotations.VisibleForTesting; +import org.apache.yetus.audience.InterfaceAudience; /** * This class manages an array of 
ByteBuffers with a default size 4MB. These @@ -237,7 +236,7 @@ public class ByteBufferArray { int endBuffer = (int) (end / bufferSize), endOffset = (int) (end % bufferSize); assert array.length >= len + arrayOffset; assert startBuffer >= 0 && startBuffer < bufferCount; - assert endBuffer >= 0 && endBuffer < bufferCount + assert (endBuffer >= 0 && endBuffer < bufferCount) || (endBuffer == bufferCount && endOffset == 0); if (startBuffer >= buffers.length || startBuffer < 0) { String msg = "Failed multiple, start=" + start + ",startBuffer=" @@ -285,7 +284,7 @@ public class ByteBufferArray { endBufferOffset = bufferSize; } assert startBuffer >= 0 && startBuffer < bufferCount; - assert endBuffer >= 0 && endBuffer < bufferCount + assert (endBuffer >= 0 && endBuffer < bufferCount) || (endBuffer == bufferCount && endBufferOffset == 0); if (startBuffer >= buffers.length || startBuffer < 0) { String msg = "Failed subArray, start=" + offset + ",startBuffer=" + startBuffer diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index 3fc1a7bdacf..5e6809f5d8b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -16,7 +16,6 @@ */ package org.apache.hadoop.hbase.util; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; @@ -29,14 +28,15 @@ import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.Arrays; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteBufferWriter; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.WritableUtils; - +import org.apache.yetus.audience.InterfaceAudience; import sun.nio.ch.DirectBuffer; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; + /** * Utility functions for working with byte buffers, such as reading/writing * variable-length long numbers. 
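The ByteBufferUtils hunk that follows only normalizes literals (1l becomes 1L) and parenthesizes the shift distances; the logic itself is an unrolled binary search for the smallest byte-width that can hold a value. A plain restatement of that predicate, under an invented name (bytesNeeded) and not taken from the patch:

public class FitsInSketch {
  // Smallest n in [1, 8] such that value fits in n bytes; negative values
  // keep all eight bytes. This is the predicate longFitsIn encodes.
  static int bytesNeeded(long value) {
    if (value < 0) {
      return 8; // sign bit set: needs all eight bytes
    }
    for (int n = 1; n < 8; n++) {
      // 1L << (8 * n) is 2^(8n); the lowercase 1l the patch replaces is
      // easily misread as the number 11, which is what error-prone flags.
      if (value < (1L << (8 * n))) {
        return n;
      }
    }
    return 8;
  }

  public static void main(String[] args) {
    System.out.println(bytesNeeded(0));        // 1
    System.out.println(bytesNeeded(256));      // 2
    System.out.println(bytesNeeded(1L << 40)); // 6
    System.out.println(bytesNeeded(-1));       // 8
  }
}

The committed code unrolls this into nested comparisons (4 bytes, then 2 or 6, and so on) so typical small values resolve in two or three branches; this patch leaves that behavior unchanged.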
@@ -245,27 +245,27 @@ public final class ByteBufferUtils { return 8; } - if (value < (1l << 4 * 8)) { + if (value < (1L << (4 * 8))) { // no more than 4 bytes - if (value < (1l << 2 * 8)) { - if (value < (1l << 1 * 8)) { + if (value < (1L << (2 * 8))) { + if (value < (1L << (1 * 8))) { return 1; } return 2; } - if (value < (1l << 3 * 8)) { + if (value < (1L << (3 * 8))) { return 3; } return 4; } // more than 4 bytes - if (value < (1l << 6 * 8)) { - if (value < (1l << 5 * 8)) { + if (value < (1L << (6 * 8))) { + if (value < (1L << (5 * 8))) { return 5; } return 6; } - if (value < (1l << 7 * 8)) { + if (value < (1L << (7 * 8))) { return 7; } return 8; @@ -281,13 +281,13 @@ public final class ByteBufferUtils { return 4; } - if (value < (1 << 2 * 8)) { - if (value < (1 << 1 * 8)) { + if (value < (1 << (2 * 8))) { + if (value < (1 << (1 * 8))) { return 1; } return 2; } - if (value <= (1 << 3 * 8)) { + if (value <= (1 << (3 * 8))) { return 3; } return 4; @@ -337,7 +337,7 @@ public final class ByteBufferUtils { throws IOException { long tmpLong = 0; for (int i = 0; i < fitInBytes; ++i) { - tmpLong |= (in.read() & 0xffl) << (8 * i); + tmpLong |= (in.read() & 0xffL) << (8 * i); } return tmpLong; } @@ -350,7 +350,7 @@ public final class ByteBufferUtils { public static long readLong(ByteBuffer in, final int fitInBytes) { long tmpLength = 0; for (int i = 0; i < fitInBytes; ++i) { - tmpLength |= (in.get() & 0xffl) << (8l * i); + tmpLength |= (in.get() & 0xffL) << (8L * i); } return tmpLength; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index e36b1bbacb0..c32649b0670 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -36,21 +36,20 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; +import com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.io.WritableUtils; - +import org.apache.yetus.audience.InterfaceAudience; import sun.misc.Unsafe; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import com.google.protobuf.ByteString; /** * Utility class that handles byte arrays, conversions to/from other types, @@ -270,6 +269,7 @@ public class Bytes implements Comparable { * @return Positive if left is bigger than right, 0 if they are equal, and * negative if left is smaller than right. 
*/ + @Override public int compareTo(Bytes that) { return BYTES_RAWCOMPARATOR.compare( this.bytes, this.offset, this.length, @@ -1157,9 +1157,9 @@ public class Bytes implements Comparable { return UnsafeAccess.toShort(bytes, offset); } else { short n = 0; - n ^= bytes[offset] & 0xFF; - n <<= 8; - n ^= bytes[offset+1] & 0xFF; + n = (short) (n ^ (bytes[offset] & 0xFF)); + n = (short) (n << 8); + n = (short) (n ^ (bytes[offset+1] & 0xFF)); return n; } } @@ -1533,8 +1533,8 @@ public class Bytes implements Comparable { final int stride = 8; final int minLength = Math.min(length1, length2); int strideLimit = minLength & ~(stride - 1); - final long offset1Adj = offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; - final long offset2Adj = offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; + final long offset1Adj = (long) offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; + final long offset2Adj = (long) offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; int i; /* diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java index 16d1fc21094..120f0dcc00f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.util; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.util.DataChecksum; +import org.apache.yetus.audience.InterfaceAudience; /** * Checksum types. The Checksum type is a one byte number diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java index b1edf7c9d19..b2af1e7bb48 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java @@ -20,9 +20,10 @@ package org.apache.hadoop.hbase.util; import java.net.URL; import java.net.URLClassLoader; -import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; + /** * Base class loader that defines couple shared constants used by sub-classes.
*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index 2a83a5d6a1d..eba3b12abc8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -31,7 +31,6 @@ import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -42,16 +41,14 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.permission.FsPermission; - import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.ipc.RemoteException; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.hadoop.ipc.RemoteException; -import org.apache.yetus.audience.InterfaceAudience; - /** * Utility methods for interacting with the underlying file system. */ @@ -402,7 +399,7 @@ public abstract class CommonFSUtils { private static boolean isValidWALRootDir(Path walDir, final Configuration c) throws IOException { Path rootDir = getRootDir(c); - if (walDir != rootDir) { + if (!walDir.equals(rootDir)) { if (walDir.toString().startsWith(rootDir.toString() + "/")) { throw new IllegalStateException("Illegal WAL directory specified. " + "WAL directories are not permitted to be under the root directory if set."); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java index 601969599e5..15828ed8eeb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java @@ -37,12 +37,12 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java index d461475be10..76a90f5e8f3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java @@ -44,7 +44,8 @@ public abstract class HasThread implements Runnable { public Thread getThread() { return thread; } - + + @Override public abstract void run(); //// Begin delegation to Thread diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java index ebaed1088f2..1f3d722a9fb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java @@ -74,15 +74,15 @@ public abstract class Hash { * @return hash function instance, or null if type is invalid */ public static Hash getInstance(int type) { - switch(type) { - case JENKINS_HASH: - return JenkinsHash.getInstance(); - case MURMUR_HASH: - return MurmurHash.getInstance(); - case MURMUR_HASH3: - return MurmurHash3.getInstance(); - default: - return null; + switch (type) { + case JENKINS_HASH: + return JenkinsHash.getInstance(); + case MURMUR_HASH: + return MurmurHash.getInstance(); + case MURMUR_HASH3: + return MurmurHash3.getInstance(); + default: + return null; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java index 29e46ceb374..2cb2ea7eb54 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java @@ -23,6 +23,7 @@ import java.util.Map; import java.util.Map.Entry; import org.apache.yetus.audience.InterfaceAudience; + import org.apache.hadoop.hbase.shaded.com.google.common.escape.Escaper; import org.apache.hadoop.hbase.shaded.com.google.common.escape.Escapers; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java index 0571a084455..532f8419b47 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java @@ -18,12 +18,13 @@ package org.apache.hadoop.hbase.util; import java.io.Closeable; import java.io.IOException; +import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.lang.management.ManagementFactory; import java.lang.reflect.Array; +import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.Set; - import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; import javax.management.IntrospectionException; @@ -41,8 +42,8 @@ import javax.management.openmbean.CompositeType; import javax.management.openmbean.TabularData; import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -340,7 +341,8 @@ public class JSONBean { * @throws MalformedObjectNameException */ public static void dumpAllBeans() throws IOException, MalformedObjectNameException { - try (PrintWriter writer = new PrintWriter(System.out)) { + try (PrintWriter writer = new PrintWriter( + new OutputStreamWriter(System.out, StandardCharsets.UTF_8))) { JSONBean dumper = new JSONBean(); try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) { MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java index c11c52a68e9..ae967a16e4e 100644 --- 
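The JSONBean change just above, and the JVM and Threads changes that follow, all fix the same findbugs complaint (DM_DEFAULT_ENCODING): new InputStreamReader(stream), new PrintWriter(stream) and String.getBytes() fall back to the platform default charset, so the same code can map bytes to characters differently depending on the host's locale. Passing StandardCharsets.UTF_8 pins the mapping. The pattern, reduced to a sketch (illustration only, not part of the patch):

    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.StandardCharsets;

    public class CharsetPinning {
      public static void main(String[] args) {
        Reader fragile = new InputStreamReader(System.in);                          // locale-dependent
        Reader pinned  = new InputStreamReader(System.in, StandardCharsets.UTF_8);  // deterministic
      }
    }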
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java @@ -28,7 +28,6 @@ import java.lang.management.RuntimeMXBean; import java.util.Hashtable; import java.util.List; import java.util.Set; - import javax.management.InstanceNotFoundException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java index 13f1996df6c..ab966f17b41 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java @@ -26,6 +26,7 @@ import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -160,7 +161,7 @@ public class JVM { new String[]{"bash", "-c", "ls /proc/" + pidhost[0] + "/fdinfo | wc -l"}); inputStream = p.getInputStream(); - inputStreamReader = new InputStreamReader(inputStream); + inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); bufferedReader = new BufferedReader(inputStreamReader); String openFileDesCount; if ((openFileDesCount = bufferedReader.readLine()) != null) { @@ -236,7 +237,7 @@ public class JVM { int count = 0; Process p = Runtime.getRuntime().exec("ps -e"); inputStream = p.getInputStream(); - inputStreamReader = new InputStreamReader(inputStream); + inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); bufferedReader = new BufferedReader(inputStreamReader); while (bufferedReader.readLine() != null) { count++; @@ -288,7 +289,7 @@ public class JVM { //using linux bash commands to retrieve info Process p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "ulimit -n"}); in = p.getInputStream(); - output = new BufferedReader(new InputStreamReader(in)); + output = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); String maxFileDesCount; if ((maxFileDesCount = output.readLine()) != null) { return Long.parseLong(maxFileDesCount); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index 39794386ffe..b68068bf678 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -503,17 +503,17 @@ public class OrderedBytes { x = src.get(); a1 = ord.apply(x) & 0xff; if (-1 == unsignedCmp(a0, 249)) { - return (a0 - 241) * 256 + a1 + 240; + return (a0 - 241L) * 256 + a1 + 240; } x = src.get(); a2 = ord.apply(x) & 0xff; if (a0 == 249) { - return 2288 + 256 * a1 + a2; + return 2288L + 256 * a1 + a2; } x = src.get(); a3 = ord.apply(x) & 0xff; if (a0 == 250) { - return (a1 << 16) | (a2 << 8) | a3; + return ((long) a1 << 16L) | (a2 << 8) | a3; } x = src.get(); a4 = ord.apply(x) & 0xff; @@ -663,7 +663,8 @@ public class OrderedBytes { dst.put((byte) ((2 * d + 1) & 0xff)); abs = abs.subtract(BigDecimal.valueOf(d)); } - a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x + // terminal digit should be 2x + a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); if (isNeg) { // 
negative values encoded as ~M DESCENDING.apply(a, offset + startM, dst.getPosition() - startM); @@ -747,8 +748,8 @@ public class OrderedBytes { dst.put((byte) (2 * d + 1)); abs = abs.subtract(BigDecimal.valueOf(d)); } - - a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x + // terminal digit should be 2x + a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); if (isNeg) { // negative values encoded as ~M DESCENDING.apply(a, offset + startM, dst.getPosition() - startM); @@ -1063,7 +1064,8 @@ public class OrderedBytes { if (s > 1) { dst.put((byte) (0x7f & t)); } else { - dst.getBytes()[offset + dst.getPosition() - 1] &= 0x7f; + dst.getBytes()[offset + dst.getPosition() - 1] = + (byte) (dst.getBytes()[offset + dst.getPosition() - 1] & 0x7f); } } ord.apply(dst.getBytes(), offset + start, dst.getPosition() - start); @@ -1116,7 +1118,7 @@ public class OrderedBytes { ret.put((byte) (t | ((ord.apply(a[offset + i]) & 0x7f) >>> s))); } if (i == end) break; - t = (byte) ((ord.apply(a[offset + i]) << 8 - s) & 0xff); + t = (byte) ((ord.apply(a[offset + i]) << (8 - s)) & 0xff); s = s == 1 ? 7 : s - 1; } src.setPosition(end); @@ -1372,7 +1374,7 @@ public class OrderedBytes { public static int encodeFloat32(PositionedByteRange dst, float val, Order ord) { final int offset = dst.getOffset(), start = dst.getPosition(); int i = Float.floatToIntBits(val); - i ^= ((i >> Integer.SIZE - 1) | Integer.MIN_VALUE); + i ^= ((i >> (Integer.SIZE - 1)) | Integer.MIN_VALUE); dst.put(FIXED_FLOAT32) .put((byte) (i >> 24)) .put((byte) (i >> 16)) @@ -1394,7 +1396,7 @@ public class OrderedBytes { for (int i = 1; i < 4; i++) { val = (val << 8) + (ord.apply(src.get()) & 0xff); } - val ^= (~val >> Integer.SIZE - 1) | Integer.MIN_VALUE; + val ^= (~val >> (Integer.SIZE - 1)) | Integer.MIN_VALUE; return Float.intBitsToFloat(val); } @@ -1466,7 +1468,7 @@ public class OrderedBytes { public static int encodeFloat64(PositionedByteRange dst, double val, Order ord) { final int offset = dst.getOffset(), start = dst.getPosition(); long lng = Double.doubleToLongBits(val); - lng ^= ((lng >> Long.SIZE - 1) | Long.MIN_VALUE); + lng ^= ((lng >> (Long.SIZE - 1)) | Long.MIN_VALUE); dst.put(FIXED_FLOAT64) .put((byte) (lng >> 56)) .put((byte) (lng >> 48)) @@ -1492,7 +1494,7 @@ public class OrderedBytes { for (int i = 1; i < 8; i++) { val = (val << 8) + (ord.apply(src.get()) & 0xff); } - val ^= (~val >> Long.SIZE - 1) | Long.MIN_VALUE; + val ^= (~val >> (Long.SIZE - 1)) | Long.MIN_VALUE; return Double.longBitsToDouble(val); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java index f23e5ca5794..4f4b775af0c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java @@ -25,8 +25,8 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.HBaseException; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class PrettyPrinter { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java index 8b1a0ad7084..6430d2eb120 100644 --- 
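The OrderedBytes hunks above collect three distinct error-prone findings. The 241L and (long) a1 casts force 64-bit arithmetic where an int intermediate was only widened to long after the computation; the decoded components here are small enough that nothing actually overflows, so this is defensive rather than a live bug. The parentheses added around (8 - s) and (Integer.SIZE - 1) do not change evaluation either, since binary minus binds tighter than a shift; they only make the precedence explicit. And the a[...] &= 0xfe rewrites are the same hidden-narrowing issue as in Bytes.toShort(). The shift-then-widen hazard the casts guard against, shown in isolation (constants chosen to make it visible; not HBase code):

    public class ShiftWiden {
      public static void main(String[] args) {
        int a1 = 0xFF;
        long viaInt  = a1 << 24;         // shift happens in int: 0xFF000000 is negative
        long viaLong = (long) a1 << 24;  // shift happens in long: value stays positive
        System.out.println(viaInt);      // -16777216
        System.out.println(viaLong);     // 4278190080
      }
    }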
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java @@ -29,11 +29,9 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.Charset; -import org.apache.commons.logging.Log; - -import org.apache.yetus.audience.InterfaceAudience; - import edu.umd.cs.findbugs.annotations.NonNull; +import org.apache.commons.logging.Log; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class ReflectionUtils { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java index 5f0ddf0fa80..dcf6626ae4c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util; import java.util.concurrent.TimeUnit; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.RetryCounter.ExponentialBackoffPolicyWithLimit; import org.apache.hadoop.hbase.util.RetryCounter.RetryConfig; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RetryCounterFactory { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowBloomHashKey.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowBloomHashKey.java index 2587cebe8e8..d750c60083d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowBloomHashKey.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowBloomHashKey.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.yetus.audience.InterfaceAudience; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowColBloomHashKey.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowColBloomHashKey.java index 41260f48a84..17a26acf3f6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowColBloomHashKey.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RowColBloomHashKey.java @@ -18,9 +18,8 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.yetus.audience.InterfaceAudience; /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftObjectPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftObjectPool.java index fbc1a477e8e..4cce1f8e6b0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftObjectPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftObjectPool.java @@ -21,13 +21,12 @@ import java.lang.ref.Reference; import java.lang.ref.SoftReference; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.util.ObjectPool.ObjectFactory; /** * A {@code SoftReference} based shared object pool. * The objects are kept in soft references and * associated with keys which are identified by the {@code equals} method. - * The objects are created by {@link ObjectFactory} on demand. 
+ * The objects are created by ObjectFactory on demand. * The object creation is expected to be lightweight, * and the objects may be excessively created and discarded. * Thread safe. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index b39a5e8fd79..4e2f09f611d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -18,11 +18,13 @@ */ package org.apache.hadoop.hbase.util; +import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.Thread.UncaughtExceptionHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; @@ -31,9 +33,9 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -312,7 +314,8 @@ public class Threads { @Override public void printThreadInfo(PrintStream stream, String title) { try { - hadoop26Method.invoke(null, new PrintWriter(stream), title); + hadoop26Method.invoke(null, new PrintWriter( + new OutputStreamWriter(stream, StandardCharsets.UTF_8)), title); } catch (IllegalAccessException | IllegalArgumentException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java index 1442bf7ab8a..908664480ef 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java @@ -44,6 +44,7 @@ public class Triple { return new Triple<>(first, second, third); } + @Override public int hashCode() { int hashFirst = (first != null ? first.hashCode() : 0); int hashSecond = (second != null ? 
second.hashCode() : 0); @@ -52,6 +53,7 @@ public class Triple { return (hashFirst >> 1) ^ hashSecond ^ (hashThird << 1); } + @Override public boolean equals(Object obj) { if (!(obj instanceof Triple)) { return false; @@ -69,6 +71,7 @@ public class Triple { return true; } + @Override public String toString() { return "(" + first + ", " + second + "," + third + " )"; } @@ -97,6 +100,3 @@ public class Triple { this.third = third; } } - - - diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java index 5cb9ea6054d..50fef6d823e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java @@ -27,7 +27,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - import sun.misc.Unsafe; import sun.nio.ch.DirectBuffer; @@ -326,7 +325,7 @@ public final class UnsafeAccess { destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } - long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET; + long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(src, srcAddress, destBase, destAddress, length); } @@ -360,7 +359,7 @@ public final class UnsafeAccess { srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset(); srcBase = src.array(); } - long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET; + long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(srcBase, srcAddress, dest, destAddress, length); } @@ -381,13 +380,13 @@ public final class UnsafeAccess { if (src.isDirect()) { srcAddress = srcOffset + ((DirectBuffer) src).address(); } else { - srcAddress = srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET; + srcAddress = (long) srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET; srcBase = src.array(); } if (dest.isDirect()) { destAddress = destOffset + ((DirectBuffer) dest).address(); } else { - destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); + destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } unsafeCopy(srcBase, srcAddress, destBase, destAddress, length); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java index 08f04a51e9b..181979f22c4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java @@ -21,7 +21,6 @@ import java.lang.ref.Reference; import java.lang.ref.WeakReference; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.util.ObjectPool.ObjectFactory; /** * A {@code WeakReference} based shared object pool. 
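Triple above, like HasThread earlier, gains @Override on the methods it redefines. The annotation is a no-op at runtime; its value is that the compiler rejects a near-miss signature that would otherwise become a silent overload. The classic shape of that trap (illustrative class, not from the patch):

    class Point {
      final int x, y;
      Point(int x, int y) { this.x = x; this.y = y; }

      // An overload, not an override: the parameter is Point, not Object.
      // HashSet, ArrayList and friends call equals(Object) and never reach this.
      public boolean equals(Point other) {
        return other != null && x == other.x && y == other.y;
      }
      // Annotating it @Override would turn the mistake into a compile error.
    }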
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java index 5eef2d35dcf..109b066fa57 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java @@ -22,11 +22,12 @@ import java.io.IOException; import java.util.Map.Entry; import java.util.Properties; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; + /** * Utility methods for reading, and building the ZooKeeper configuration. * diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index 4202036ab4b..db203b80b75 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -287,7 +287,7 @@ public class ClassFinder { return null; } - private class FileFilterWithName implements FileFilter { + private static class FileFilterWithName implements FileFilter { private FileNameFilter nameFilter; public FileFilterWithName(FileNameFilter nameFilter) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index a503820914a..d84e8ec5c45 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -222,7 +222,8 @@ public class HBaseCommonTestingUtility { LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex); } } while (ntries < 30); - return ntries < 30; + + return false; } /** diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java index 751b9e3c89f..709646bcc50 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java @@ -87,7 +87,9 @@ public class ResourceCheckerJUnitListener extends RunListener { static class OpenFileDescriptorResourceAnalyzer extends ResourceChecker.ResourceAnalyzer { @Override public int getVal(Phase phase) { - if (!JVM.isUnix()) return 0; + if (!JVM.isUnix()) { + return 0; + } JVM jvm = new JVM(); return (int)jvm.getOpenFileDescriptorCount(); } @@ -101,16 +103,20 @@ public class ResourceCheckerJUnitListener extends RunListener { static class MaxFileDescriptorResourceAnalyzer extends ResourceChecker.ResourceAnalyzer { @Override public int getVal(Phase phase) { - if (!JVM.isUnix()) return 0; + if (!JVM.isUnix()) { + return 0; + } JVM jvm = new JVM(); return (int)jvm.getMaxFileDescriptorCount(); - } - } + } + } static class SystemLoadAverageResourceAnalyzer extends ResourceChecker.ResourceAnalyzer { @Override public int getVal(Phase phase) { - if (!JVM.isUnix()) return 0; + if (!JVM.isUnix()) { + return 0; + } return (int)(new JVM().getSystemLoadAverage()*100); } } @@ -118,7 +124,9 @@ public class ResourceCheckerJUnitListener extends RunListener { static class 
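In the HBaseCommonTestingUtility hunk above, return ntries < 30 after the do/while loop was flagged because the expression is constant: the loop only falls through once ntries has reached 30 (success paths return from inside it), so it could never evaluate to true there. return false states the actual contract. The reduced shape of the pattern (a stand-in sketch, not the HBase method):

    boolean deleteWithRetries(java.io.File dir) {
      int ntries = 0;
      do {
        ntries++;
        if (dir.delete()) {
          return true;            // the only way to report success
        }
      } while (ntries < 30);
      return false;               // here ntries == 30, so "ntries < 30" was always false
    }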
ProcessCountResourceAnalyzer extends ResourceChecker.ResourceAnalyzer { @Override public int getVal(Phase phase) { - if (!JVM.isUnix()) return 0; + if (!JVM.isUnix()) { + return 0; + } return new JVM().getNumberOfRunningProcess(); } } @@ -126,7 +134,9 @@ public class ResourceCheckerJUnitListener extends RunListener { static class AvailableMemoryMBResourceAnalyzer extends ResourceChecker.ResourceAnalyzer { @Override public int getVal(Phase phase) { - if (!JVM.isUnix()) return 0; + if (!JVM.isUnix()) { + return 0; + } return (int) (new JVM().getFreeMemory() / (1024L * 1024L)); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java index f179bfefb46..26459f927ee 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java @@ -53,7 +53,7 @@ public class TestByteBufferKeyValue { @Test public void testByteBufferBackedKeyValue() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1); + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); ByteBufferCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -96,10 +96,10 @@ public class TestByteBufferKeyValue { assertEquals(0L, offheapKV.getTimestamp()); assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); - kvCell = new KeyValue(row1, fam2, qual2, 0l, Type.Put, row1); + kvCell = new KeyValue(row1, fam2, qual2, 0L, Type.Put, row1); buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); - offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l); + offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); assertEquals( FAM2, ByteBufferUtils.toStringBinary(offheapKV.getFamilyByteBuffer(), @@ -112,7 +112,7 @@ public class TestByteBufferKeyValue { kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, Type.Put, row1); buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); - offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l); + offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); assertEquals( ROW1, ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(), @@ -135,10 +135,10 @@ public class TestByteBufferKeyValue { @Test public void testByteBufferBackedKeyValueWithTags() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1, tags); + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); - ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l); + ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); assertEquals( ROW1, ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(), @@ -175,7 +175,7 @@ public class TestByteBufferKeyValue { @Test public void testGetKeyMethods() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1, tags); + KeyValue 
kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getKeyLength()); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), kvCell.getKeyOffset(), kvCell.getKeyLength()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java index b8754141b17..ad185470f20 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hbase; +import static org.junit.Assert.assertEquals; + import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import static org.junit.Assert.assertEquals; - @Category({MiscTests.class, SmallTests.class}) public class TestCellBuilder { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index f25925ffa6d..4746bec262f 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -55,16 +55,16 @@ public class TestCellComparator { kv2 = new KeyValue(row1, fam1, qual1, val); assertTrue((comparator.compareFamilies(kv1, kv2) > 0)); - kv1 = new KeyValue(row1, fam1, qual1, 1l, val); - kv2 = new KeyValue(row1, fam1, qual1, 2l, val); + kv1 = new KeyValue(row1, fam1, qual1, 1L, val); + kv2 = new KeyValue(row1, fam1, qual1, 2L, val); assertTrue((comparator.compare(kv1, kv2) > 0)); - kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum); + kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum); assertTrue((comparator.compare(kv1, kv2) > 0)); - kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); + kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); assertTrue((CellUtil.equals(kv1, kv2))); } @@ -80,18 +80,18 @@ public class TestCellComparator { assertTrue( (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); - kv1 = new KeyValue(row1, fam1, qual1, 1l, val); - kv2 = new KeyValue(row1, fam1, qual1, 2l, val); + kv1 = new KeyValue(row1, fam1, qual1, 1L, val); + kv2 = new KeyValue(row1, fam1, qual1, 2L, val); assertTrue( (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); - kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum); + kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum); assertTrue( (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); - kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); + kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); assertTrue( (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) == 0); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 397476f9bc4..0395c0906ee 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; @@ -44,7 +45,7 @@ public class TestCellUtil { /** * CellScannable used in test. Returns a {@link TestCellScanner} */ - private class TestCellScannable implements CellScannable { + private static class TestCellScannable implements CellScannable { private final int cellsCount; TestCellScannable(final int cellsCount) { this.cellsCount = cellsCount; @@ -58,7 +59,7 @@ public class TestCellUtil { /** * CellScanner used in test. */ - private class TestCellScanner implements CellScanner { + private static class TestCellScanner implements CellScanner { private int count = 0; private Cell current = null; private final int cellsCount; @@ -86,7 +87,7 @@ public class TestCellUtil { /** * Cell used in test. Has row only. */ - private class TestCell implements Cell { + private static class TestCell implements Cell { private final byte [] row; TestCell(final int i) { @@ -201,7 +202,6 @@ public class TestCellUtil { /** * Was overflowing if 100k or so lists of cellscanners to return. - * @throws IOException */ @Test public void testCreateCellScannerOverflow() throws IOException { @@ -222,7 +222,9 @@ public class TestCellUtil { consume(CellUtil.createCellScanner(cells), hundredK * 1); NavigableMap<byte [], List<Cell>> m = new TreeMap<>(Bytes.BYTES_COMPARATOR); List<Cell> cellArray = new ArrayList<>(hundredK); - for (int i = 0; i < hundredK; i++) cellArray.add(new TestCell(i)); + for (int i = 0; i < hundredK; i++) { + cellArray.add(new TestCell(i)); + } m.put(new byte [] {'f'}, cellArray); consume(CellUtil.createCellScanner(m), hundredK * 1); } @@ -252,7 +254,9 @@ public class TestCellUtil { private void consume(final CellScanner scanner, final int expected) throws IOException { int count = 0; - while (scanner.advance()) count++; + while (scanner.advance()) { + count++; + } Assert.assertEquals(expected, count); } @@ -307,7 +311,8 @@ public class TestCellUtil { @Test public void testFindCommonPrefixInFlatKey() { // The whole key matching case - KeyValue kv1 = new KeyValue("r1".getBytes(), "f1".getBytes(), "q1".getBytes(), null); + KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(kv1.getKeyLength(), PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true)); Assert.assertEquals(kv1.getKeyLength(), @@ -315,30 +320,35 @@ public class TestCellUtil { Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE, PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false)); // The rk length itself mismatch - KeyValue kv2 = new KeyValue("r12".getBytes(), "f1".getBytes(), "q1".getBytes(), null); + KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(1, PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true)); // part of rk is same - KeyValue kv3 = new KeyValue("r14".getBytes(), "f1".getBytes(), "q1".getBytes(), null); - 
Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes().length, + KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); + Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes(StandardCharsets.UTF_8).length, PrivateCellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true)); // entire rk is same but different cf name - KeyValue kv4 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q1".getBytes(), null); + KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f2".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE - + "f".getBytes().length, PrivateCellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true)); + + "f".getBytes(StandardCharsets.UTF_8).length, + PrivateCellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true)); // rk and family are same and part of qualifier - KeyValue kv5 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q123".getBytes(), null); + KeyValue kv5 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f2".getBytes(StandardCharsets.UTF_8), "q123".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv4.getFamilyLength() + kv4.getQualifierLength(), PrivateCellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true)); // rk, cf and q are same. ts differs - KeyValue kv6 = new KeyValue("rk".getBytes(), 1234L); - KeyValue kv7 = new KeyValue("rk".getBytes(), 1235L); + KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L); + KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L); // only last byte out of 8 ts bytes in ts part differs Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv6.getFamilyLength() + kv6.getQualifierLength() + 7, PrivateCellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true)); // rk, cf, q and ts are same. Only type differs - KeyValue kv8 = new KeyValue("rk".getBytes(), 1234L, Type.Delete); + KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE, PrivateCellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true)); @@ -354,7 +364,7 @@ public class TestCellUtil { @Test public void testToString() { byte [] row = Bytes.toBytes("row"); - long ts = 123l; + long ts = 123L; // Make a KeyValue and a Cell and see if same toString result. 
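TestCellUtil's helper classes above (TestCellScannable, TestCellScanner, TestCell), like MockKeyValue and Names later in the patch, become static nested classes, the findbugs SIC_INNER_SHOULD_BE_STATIC fix. A non-static inner class that never touches its enclosing instance still stores a hidden reference to it, costing a word per object and keeping the outer instance reachable; with a hundred thousand TestCell objects in a list, that is a hundred thousand pointless back-references. The difference, minimally (illustrative names):

    public class Outer {
      class Inner { }          // every instance carries a hidden Outer.this reference
      static class Nested { }  // no implicit reference; independent of any Outer

      void demo() {
        Inner a = new Inner();    // only constructible against an enclosing Outer
        Nested b = new Nested();  // standalone
      }
    }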
KeyValue kv = new KeyValue(row, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, ts, KeyValue.Type.Minimum, HConstants.EMPTY_BYTE_ARRAY); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index 826c8db81ec..7d85b97cad8 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -167,14 +167,18 @@ public class TestChoreService { @Override protected boolean initialChore() { countOfChoreCalls++; - if (outputOnTicks) outputTickCount(); + if (outputOnTicks) { + outputTickCount(); + } return true; } @Override protected void chore() { countOfChoreCalls++; - if (outputOnTicks) outputTickCount(); + if (outputOnTicks) { + outputTickCount(); + } } private void outputTickCount() { @@ -249,16 +253,16 @@ public class TestChoreService { int loopCount = 0; boolean brokeOutOfLoop = false; - while (!chore.isInitialChoreComplete() && chore.isScheduled()) { - Thread.sleep(failureThreshold * period); - loopCount++; - if (loopCount > 3) { - brokeOutOfLoop = true; - break; - } - } + while (!chore.isInitialChoreComplete() && chore.isScheduled()) { + Thread.sleep(failureThreshold * period); + loopCount++; + if (loopCount > 3) { + brokeOutOfLoop = true; + break; + } + } - assertFalse(brokeOutOfLoop); + assertFalse(brokeOutOfLoop); } finally { shutdownService(service); } @@ -297,10 +301,10 @@ public class TestChoreService { } }; - assertEquals("Name construction failed", chore1.getName(), NAME); - assertEquals("Period construction failed", chore1.getPeriod(), PERIOD); - assertEquals("Initial Delay construction failed", chore1.getInitialDelay(), VALID_DELAY); - assertEquals("TimeUnit construction failed", chore1.getTimeUnit(), UNIT); + assertEquals("Name construction failed", NAME, chore1.getName()); + assertEquals("Period construction failed", PERIOD, chore1.getPeriod()); + assertEquals("Initial Delay construction failed", VALID_DELAY, chore1.getInitialDelay()); + assertEquals("TimeUnit construction failed", UNIT, chore1.getTimeUnit()); ScheduledChore invalidDelayChore = new ScheduledChore(NAME, new SampleStopper(), PERIOD, INVALID_DELAY, UNIT) { @@ -319,7 +323,8 @@ public class TestChoreService { final int corePoolSize = 10; final int defaultCorePoolSize = ChoreService.MIN_CORE_POOL_SIZE; - ChoreService customInit = new ChoreService("testChoreServiceConstruction_custom", corePoolSize, false); + ChoreService customInit = + new ChoreService("testChoreServiceConstruction_custom", corePoolSize, false); try { assertEquals(corePoolSize, customInit.getCorePoolSize()); } finally { @@ -475,7 +480,7 @@ public class TestChoreService { Thread.sleep(chorePeriod * 10); assertEquals("Chores are missing their start time. Should expand core pool size", service.getNumberOfScheduledChores(), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 5); + assertEquals(5, service.getNumberOfChoresMissingStartTime()); // Now we begin to cancel the chores that caused an increase in the core thread pool of the // ChoreService. These cancellations should cause a decrease in the core thread pool. 
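The assertEquals rewrites in TestChoreService above and below do not change what passes; they fix which side JUnit blames when a test fails. The first argument is documented as the expected value, so with the arguments reversed a failing run reports the live value as "expected" and the constant as "actual", inverting the diagnosis. For example, had the count been 7 ('service' here stands for the ChoreService under test in the surrounding hunks):

    // corrected order: assertEquals(expected, actual)
    assertEquals(5, service.getNumberOfChoresMissingStartTime());
    // fails with: java.lang.AssertionError: expected:<5> but was:<7>

    // the old, reversed order would have reported: expected:<7> but was:<5>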
@@ -483,31 +488,31 @@ public class TestChoreService { Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 4); + assertEquals(4, service.getNumberOfChoresMissingStartTime()); slowChore4.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 3); + assertEquals(3, service.getNumberOfChoresMissingStartTime()); slowChore3.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 2); + assertEquals(2, service.getNumberOfChoresMissingStartTime()); slowChore2.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 1); + assertEquals(1, service.getNumberOfChoresMissingStartTime()); slowChore1.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 0); + assertEquals(0, service.getNumberOfChoresMissingStartTime()); } finally { shutdownService(service); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java index fc30bbe1bc2..3a6d9350fc7 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java @@ -38,7 +38,6 @@ import java.util.jar.Attributes; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.jar.Manifest; - import javax.tools.JavaCompiler; import javax.tools.ToolProvider; @@ -46,7 +45,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.*; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java index 0a0a1d24d6c..2b565f4ca30 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java @@ -19,25 +19,28 @@ */ package org.apache.hadoop.hbase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; + import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Before; import org.junit.Test; import 
org.junit.experimental.categories.Category; @Category({MiscTests.class, SmallTests.class}) -public class TestCompoundConfiguration extends TestCase { +public class TestCompoundConfiguration { private Configuration baseConf; private int baseConfSize; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { baseConf = new Configuration(); baseConf.set("A", "1"); baseConf.setInt("B", 2); @@ -104,12 +107,15 @@ public class TestCompoundConfiguration extends TestCase { assertEquals(4, compoundConf.getInt("D", 0)); assertNull(compoundConf.get("E")); assertEquals(6, compoundConf.getInt("F", 6)); - + int cnt = 0; for (Map.Entry<String,String> entry : compoundConf) { cnt++; - if (entry.getKey().equals("B")) assertEquals("2b", entry.getValue()); - else if (entry.getKey().equals("G")) assertEquals(null, entry.getValue()); + if (entry.getKey().equals("B")) { + assertEquals("2b", entry.getValue()); + } else if (entry.getKey().equals("G")) { + assertNull(entry.getValue()); + } } // verify that entries from ImmutableConfigMap's are merged in the iterator's view assertEquals(baseConfSize + 1, cnt); @@ -139,12 +145,15 @@ public class TestCompoundConfiguration extends TestCase { assertNull(compoundConf.get("E")); assertEquals(6, compoundConf.getInt("F", 6)); assertNull(compoundConf.get("G")); - + int cnt = 0; for (Map.Entry<String,String> entry : compoundConf) { cnt++; - if (entry.getKey().equals("B")) assertEquals("2b", entry.getValue()); - else if (entry.getKey().equals("G")) assertEquals(null, entry.getValue()); + if (entry.getKey().equals("B")) { + assertEquals("2b", entry.getValue()); + } else if (entry.getKey().equals("G")) { + assertNull(entry.getValue()); + } } // verify that entries from ImmutableConfigMap's are merged in the iterator's view assertEquals(baseConfSize + 2, cnt); @@ -180,12 +189,15 @@ public class TestCompoundConfiguration extends TestCase { int cnt = 0; for (Map.Entry<String,String> entry : compoundConf) { cnt++; - if (entry.getKey().equals("B")) assertEquals("2b", entry.getValue()); - else if (entry.getKey().equals("G")) assertEquals(null, entry.getValue()); + if (entry.getKey().equals("B")) { + assertEquals("2b", entry.getValue()); + } else if (entry.getKey().equals("G")) { + assertNull(entry.getValue()); + } } // verify that entries from ImmutableConfigMap's are merged in the iterator's view assertEquals(4, cnt); - + // Verify that adding map after compound configuration is modified overrides properly CompoundConfiguration conf2 = new CompoundConfiguration(); conf2.set("X", "modification"); @@ -218,8 +230,11 @@ public class TestCompoundConfiguration extends TestCase { int cnt = 0; for (Map.Entry<String,String> entry : compoundConf) { cnt++; - if (entry.getKey().equals("A")) assertEquals(newValueForA, entry.getValue()); - else if (entry.getKey().equals("B")) assertEquals(newValueForB, entry.getValue()); + if (entry.getKey().equals("A")) { + assertEquals(newValueForA, entry.getValue()); + } else if (entry.getKey().equals("B")) { + assertEquals(newValueForB, entry.getValue()); + } } // verify that entries from ImmutableConfigMap's are merged in the iterator's view assertEquals(baseConfSize + 1, cnt); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index b713ff6bf91..abb6a2825a6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -192,10 +192,9 
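TestCompoundConfiguration above also migrates from JUnit 3 to JUnit 4: junit.framework.TestCase discovers tests reflectively by the test* naming convention and hooks setup through the protected setUp() override, whereas the annotation style used by the rest of hbase-common is explicit and frees the class from extending anything. The minimal shape of such a migration (Widget is a made-up class, not from HBase):

    import static org.junit.Assert.assertEquals;

    import org.junit.Before;
    import org.junit.Test;

    public class WidgetTest {       // no longer extends TestCase
      private Widget widget;

      @Before                       // replaces protected void setUp()
      public void setUp() {
        widget = new Widget();
      }

      @Test                         // replaces the testXxx naming convention
      public void startsEmpty() {
        assertEquals(0, widget.size());
      }
    }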
@@ public class TestHBaseConfiguration { } // Instantiate Hadoop CredentialProviderFactory try { - hadoopCredProviderFactory = hadoopCredProviderFactoryClz.newInstance(); - } catch (InstantiationException e) { - return false; - } catch (IllegalAccessException e) { + hadoopCredProviderFactory = + hadoopCredProviderFactoryClz.getDeclaredConstructor().newInstance(); + } catch (Exception e) { return false; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java index 9abf90843e8..e2556773d70 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; + import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -117,15 +118,16 @@ public class TestIndividualBytesFieldCell { * @param ic An instance of IndividualBytesFieldCell to compare. * @param kv An instance of KeyValue to compare. * @param withTags Whether to write tags. - * @throws IOException */ private void testWriteIntoOutputStream(IndividualBytesFieldCell ic, KeyValue kv, boolean withTags) throws IOException { ByteArrayOutputStream outIC = new ByteArrayOutputStream(ic.getSerializedSize(withTags)); ByteArrayOutputStream outKV = new ByteArrayOutputStream(kv.getSerializedSize(withTags)); - assertEquals(kv.write(outKV, withTags), ic.write(outIC, withTags)); // compare the number of bytes written - assertArrayEquals(outKV.getBuffer(), outIC.getBuffer()); // compare the underlying byte array + // compare the number of bytes written + assertEquals(kv.write(outKV, withTags), ic.write(outIC, withTags)); + // compare the underlying byte array + assertArrayEquals(outKV.getBuffer(), outIC.getBuffer()); } /** @@ -146,15 +148,21 @@ public class TestIndividualBytesFieldCell { byte[] value = null; byte[] tags = null; - Cell ic1 = new IndividualBytesFieldCell(row, family, qualifier, timestamp, type, seqId, value, tags); + Cell ic1 = + new IndividualBytesFieldCell(row, family, qualifier, timestamp, type, seqId, value, tags); Cell kv1 = new KeyValue(row, family, qualifier, timestamp, type, value, tags); - byte[] familyArrayInKV = Bytes.copy(kv1.getFamilyArray() , kv1.getFamilyOffset() , kv1.getFamilyLength()); - byte[] qualifierArrayInKV = Bytes.copy(kv1.getQualifierArray(), kv1.getQualifierOffset(), kv1.getQualifierLength()); - byte[] valueArrayInKV = Bytes.copy(kv1.getValueArray() , kv1.getValueOffset() , kv1.getValueLength()); - byte[] tagsArrayInKV = Bytes.copy(kv1.getTagsArray() , kv1.getTagsOffset() , kv1.getTagsLength()); + byte[] familyArrayInKV = + Bytes.copy(kv1.getFamilyArray(), kv1.getFamilyOffset(), kv1.getFamilyLength()); + byte[] qualifierArrayInKV = + Bytes.copy(kv1.getQualifierArray(), kv1.getQualifierOffset(), kv1.getQualifierLength()); + byte[] valueArrayInKV = + Bytes.copy(kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength()); + byte[] tagsArrayInKV = + Bytes.copy(kv1.getTagsArray(), kv1.getTagsOffset(), kv1.getTagsLength()); - // getXXXArray() for family, qualifier, value and tags are supposed to return empty byte array, rather than null. 
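The TestHBaseConfiguration hunk above swaps Class.newInstance() for getDeclaredConstructor().newInstance(). The older API is discouraged (and deprecated as of Java 9) because it rethrows any checked exception the no-arg constructor throws without declaring it; the Constructor route surfaces constructor failures wrapped in InvocationTargetException, which is also why the two narrow catch blocks could collapse into a single broad catch. The pattern as a sketch ('clazz' stands for any class with an accessible no-arg constructor; illustration only):

    static Object instantiate(Class<?> clazz) throws ReflectiveOperationException {
      // clazz.newInstance() would rethrow undeclared checked exceptions;
      // this form delivers them wrapped in InvocationTargetException instead.
      return clazz.getDeclaredConstructor().newInstance();
    }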
+ // getXXXArray() for family, qualifier, value and tags are supposed to return empty byte array, + // rather than null. assertArrayEquals(familyArrayInKV , ic1.getFamilyArray()); assertArrayEquals(qualifierArrayInKV, ic1.getQualifierArray()); assertArrayEquals(valueArrayInKV , ic1.getValueArray()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 45adc986af5..86891ae77fe 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -18,6 +18,8 @@ */ package org.apache.hadoop.hbase; +import static org.junit.Assert.assertNotEquals; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -30,15 +32,12 @@ import java.util.Set; import java.util.TreeSet; import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import static org.junit.Assert.assertNotEquals; - public class TestKeyValue extends TestCase { private static final Log LOG = LogFactory.getLog(TestKeyValue.class); @@ -582,7 +581,7 @@ public class TestKeyValue extends TestCase { assertTrue(kvA2.equals(deSerKV2)); } - private class MockKeyValue implements Cell { + private static class MockKeyValue implements Cell { private final KeyValue kv; public MockKeyValue(KeyValue kv) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java index 54e25e8c3ac..3bf05c4b96f 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hbase; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; - import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.fail; -import org.apache.hadoop.hbase.testclassification.MiscTests; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; + import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -54,22 +54,23 @@ public class TestTableName extends TestWatcher { return tableName; } - String emptyNames[] ={"", " "}; - String invalidNamespace[] = {":a", "%:a"}; - String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok", - "with-dash.with_underscore", "02-01-2012.my_table_01-02", "xyz._mytable_", "9_9_0.table_02" - , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", "legal..legal.t2", - "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", "ns:my_table_01-02"}; - String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", "spaces not ok", - "-dash-.start_illegal", "new.table with space", "01 .table", "ns:-illegaldash", - "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"}; + String[] emptyNames = {"", " "}; + 
String[] invalidNamespace = {":a", "%:a"}; + String[] legalTableNames = {"foo", "with-dash_under.dot", "_under_start_ok", + "with-dash.with_underscore", "02-01-2012.my_table_01-02", "xyz._mytable_", "9_9_0.table_02", + "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", "legal..legal.t2", + "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", "ns:my_table_01-02"}; + String[] illegalTableNames = {".dot_start_illegal", "-dash_start_illegal", "spaces not ok", + "-dash-.start_illegal", "new.table with space", "01 .table", "ns:-illegaldash", + "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"}; @Test(expected = IllegalArgumentException.class) public void testInvalidNamespace() { for (String tn : invalidNamespace) { TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn)); - fail("invalid namespace " + tn + " should have failed with IllegalArgumentException for namespace"); + fail("invalid namespace " + tn + + " should have failed with IllegalArgumentException for namespace"); } } @@ -108,7 +109,7 @@ public class TestTableName extends TestWatcher { } } - class Names { + static class Names { String ns; byte[] nsb; String tn; @@ -118,22 +119,30 @@ public class TestTableName extends TestWatcher { Names(String ns, String tn) { this.ns = ns; - nsb = ns.getBytes(); + nsb = ns.getBytes(StandardCharsets.UTF_8); this.tn = tn; - tnb = tn.getBytes(); + tnb = tn.getBytes(StandardCharsets.UTF_8); nn = this.ns + ":" + this.tn; - nnb = nn.getBytes(); + nnb = nn.getBytes(StandardCharsets.UTF_8); } @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } Names names = (Names) o; - if (!ns.equals(names.ns)) return false; - if (!tn.equals(names.tn)) return false; + if (!ns.equals(names.ns)) { + return false; + } + if (!tn.equals(names.tn)) { + return false; + } return true; } @@ -147,16 +156,16 @@ public class TestTableName extends TestWatcher { } Names[] names = new Names[] { - new Names("n1", "n1"), - new Names("n2", "n2"), - new Names("table1", "table1"), - new Names("table2", "table2"), - new Names("table2", "table1"), - new Names("table1", "table2"), - new Names("n1", "table1"), - new Names("n1", "table1"), - new Names("n2", "table2"), - new Names("n2", "table2") + new Names("n1", "n1"), + new Names("n2", "n2"), + new Names("table1", "table1"), + new Names("table2", "table2"), + new Names("table2", "table1"), + new Names("table1", "table2"), + new Names("n1", "table1"), + new Names("n1", "table1"), + new Names("n2", "table2"), + new Names("n2", "table2") }; @Test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java index 65fae4608ba..fc4a2be4a6a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Rule; import org.junit.Ignore; +import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java index d0ad81d7d85..6ee52cb5c94 100644 --- 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
index 65fae4608ba..fc4a2be4a6a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Rule;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestRule;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
index d0ad81d7d85..6ee52cb5c94 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase;
 
+import static org.junit.Assert.fail;
+
 import java.text.MessageFormat;
 
 import org.apache.commons.logging.Log;
@@ -26,8 +28,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.yetus.audience.InterfaceAudience;
 
-import static org.junit.Assert.fail;
-
 /**
  * A class that provides a standard waitFor pattern
  * See details at https://issues.apache.org/jira/browse/HBASE-7384
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
index fad30de0ebf..78b84f7f745 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
@@ -35,14 +35,15 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
+
 @Category({MiscTests.class, SmallTests.class})
 public class TestCellCodecWithTags {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
index 128ef62974e..48239fbd84f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
@@ -68,7 +68,7 @@ public class TestKeyValueCodec {
     Codec.Encoder encoder = kvc.getEncoder(dos);
     final KeyValue kv =
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"),
         Bytes.toBytes("v"));
-    final long length = kv.getLength() + Bytes.SIZEOF_INT;
+    final int length = kv.getLength() + Bytes.SIZEOF_INT;
     encoder.write(kv);
     encoder.flush();
     dos.close();
@@ -98,7 +98,7 @@ public class TestKeyValueCodec {
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
         Bytes.toBytes("2"));
     final KeyValue kv3 =
       new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
         Bytes.toBytes("3"));
-    final long length = kv1.getLength() + Bytes.SIZEOF_INT;
+    final int length = kv1.getLength() + Bytes.SIZEOF_INT;
     encoder.write(kv1);
     encoder.write(kv2);
     encoder.write(kv3);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
index 27c64308422..c35f4344273 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
@@ -35,14 +35,15 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
+
 @Category({MiscTests.class, SmallTests.class})
 public class TestKeyValueCodecWithTags {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferListOutputStream.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferListOutputStream.java
index e1d1e0484d9..d28064c303d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferListOutputStream.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferListOutputStream.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+
 import java.nio.ByteBuffer;
 import java.util.List;
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferPool.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferPool.java
index 64a4103d8aa..cf1f8ca2ebc 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferPool.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferPool.java
@@ -17,13 +17,15 @@
  */
 package org.apache.hadoop.hbase.io;
 
+import static org.junit.Assert.assertEquals;
+
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
+
 @Category({ IOTests.class, SmallTests.class })
 public class TestByteBufferPool {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
index dddd9e7eef4..5f4115e2b80 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
@@ -26,12 +26,12 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
-import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.ByteBufferCell;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.io.util.LRUDictionary;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
index 781924b22b0..9b45d0996ad 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
@@ -17,7 +17,6 @@
 package org.apache.hadoop.hbase.io.crypto;
 
 import java.security.Key;
-
 import javax.crypto.spec.SecretKeySpec;
 
 /**
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
index dbf7fc5d26e..0f45e5d0716 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
@@ -133,8 +133,8 @@ public class TestCipherProvider {
     Cipher a = Encryption.getCipher(conf, "TEST");
     assertNotNull(a);
     assertTrue(a.getProvider() instanceof MyCipherProvider);
-    assertEquals(a.getName(), "TEST");
-    assertEquals(a.getKeyLength(), 0);
+    assertEquals("TEST", a.getName());
+    assertEquals(0, a.getKeyLength());
   }
 
   @Test
@@ -149,7 +149,7 @@ public class TestCipherProvider {
       assertNotNull(a);
       assertTrue(a.getProvider() instanceof DefaultCipherProvider);
       assertEquals(a.getName(), algorithm);
-      assertEquals(a.getKeyLength(), AES.KEY_LENGTH);
+      assertEquals(AES.KEY_LENGTH, a.getKeyLength());
     }
   }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
index 0d38356010b..07dd601d277 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.security.Key;
-
 import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.commons.logging.Log;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
index dab03f22e62..036ad603e3f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
@@ -44,9 +44,9 @@ public class TestKeyProvider {
     Key key = provider.getKey("foo");
     assertNotNull("Test provider did not return a key as expected", key);
-    assertEquals("Test provider did not create a key for AES", key.getAlgorithm(), "AES");
-    assertEquals("Test provider did not create a key of adequate length",
-      key.getEncoded().length, AES.KEY_LENGTH);
+    assertEquals("Test provider did not create a key for AES", "AES", key.getAlgorithm());
+    assertEquals("Test provider did not create a key of adequate length", AES.KEY_LENGTH,
+      key.getEncoded().length);
   }
 }
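The TestCipherProvider and TestKeyProvider hunks all make the same fix: JUnit's assertEquals takes (expected, actual), and swapping the arguments produces misleading failure messages. A hedged illustration (the values and the helper method are invented, not from the patch):

    import static org.junit.Assert.assertEquals;

    public class ArgumentOrderExample {
      public void demo() {
        int actual = computeKeyLength();
        // Wrong order: on failure JUnit prints "expected:<0> but was:<16>",
        // blaming the constant instead of the computation.
        // assertEquals(actual, 16);
        // Right order: "expected:<16> but was:<0>" points at the real culprit.
        assertEquals(16, actual);
      }

      private int computeKeyLength() {
        return 16; // stand-in for the code under test
      }
    }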
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 6e9816a6013..790568e26aa 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -22,11 +22,11 @@ import static org.junit.Assert.assertNotNull;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.Key;
 import java.security.KeyStore;
 import java.security.MessageDigest;
 import java.util.Properties;
-
 import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.commons.logging.Log;
@@ -52,7 +52,7 @@ public class TestKeyStoreKeyProvider {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes());
+    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8));
     // Create a JKECS store containing a test secret key
     KeyStore store = KeyStore.getInstance("JCEKS");
     store.load(null, PASSWORD.toCharArray());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
index ea8879b3b0f..d0f2600e568 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
@@ -30,7 +30,6 @@ import java.security.Provider;
 import java.security.SecureRandom;
 import java.security.SecureRandomSpi;
 import java.security.Security;
-
 import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.commons.io.IOUtils;
@@ -55,8 +54,8 @@ public class TestAES {
   public void testAESAlgorithm() throws Exception {
     Configuration conf = HBaseConfiguration.create();
     Cipher aes = Encryption.getCipher(conf, "AES");
-    assertEquals(aes.getKeyLength(), AES.KEY_LENGTH);
-    assertEquals(aes.getIvLength(), AES.IV_LENGTH);
+    assertEquals(AES.KEY_LENGTH, aes.getKeyLength());
+    assertEquals(AES.IV_LENGTH, aes.getIvLength());
     Encryptor e = aes.getEncryptor();
     e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES"));
     e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"));
@@ -91,8 +90,7 @@ public class TestAES {
     DefaultCipherProvider.getInstance().setConf(conf);
     AES aes = new AES(DefaultCipherProvider.getInstance());
-    assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(),
-      "TestRNG");
+    assertEquals("AES did not find alternate RNG", "TestRNG", aes.getRNG().getAlgorithm());
   }
 
   static class TestProvider extends Provider {
@@ -100,6 +98,7 @@ public class TestAES {
     public TestProvider() {
       super("TEST", 1.0, "Test provider");
       AccessController.doPrivileged(new PrivilegedAction<Object>() {
+        @Override
        public Object run() {
           put("SecureRandom.TestRNG", TestAES.class.getName() + "$TestRNG");
           return null;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java
index dca62e51748..de9b787596b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java
@@ -16,6 +16,22 @@
  */
 package org.apache.hadoop.hbase.io.crypto.aes;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.security.AccessController;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivilegedAction;
+import java.security.Provider;
+import java.security.SecureRandom;
+import java.security.SecureRandomSpi;
+import java.security.Security;
+import javax.crypto.spec.SecretKeySpec;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -29,14 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import javax.crypto.spec.SecretKeySpec;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.OutputStream;
-import java.security.*;
-
-import static org.junit.Assert.*;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestCommonsAES {
 
@@ -46,8 +54,8 @@ public class TestCommonsAES {
   public void testAESAlgorithm() throws Exception {
     Configuration conf = HBaseConfiguration.create();
     Cipher aes = Encryption.getCipher(conf, "AES");
-    assertEquals(aes.getKeyLength(), CommonsCryptoAES.KEY_LENGTH);
-    assertEquals(aes.getIvLength(), CommonsCryptoAES.IV_LENGTH);
+    assertEquals(CommonsCryptoAES.KEY_LENGTH, aes.getKeyLength());
+    assertEquals(CommonsCryptoAES.IV_LENGTH, aes.getIvLength());
     Encryptor e = aes.getEncryptor();
     e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES"));
     e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"));
@@ -82,8 +90,7 @@ public class TestCommonsAES {
     DefaultCipherProvider.getInstance().setConf(conf);
     AES aes = new AES(DefaultCipherProvider.getInstance());
-    assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(),
-      "TestRNG");
+    assertEquals("AES did not find alternate RNG", "TestRNG", aes.getRNG().getAlgorithm());
   }
 
   static class TestProvider extends Provider {
@@ -91,6 +98,7 @@ public class TestCommonsAES {
     public TestProvider() {
       super("TEST", 1.0, "Test provider");
       AccessController.doPrivileged(new PrivilegedAction<Object>() {
+        @Override
        public Object run() {
           put("SecureRandom.TestRNG", TestCommonsAES.class.getName() + "$TestRNG");
           return null;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/hadoopbackport/TestThrottledInputStream.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/hadoopbackport/TestThrottledInputStream.java
index 15d292d404c..00bd1fbdeb4 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/hadoopbackport/TestThrottledInputStream.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/hadoopbackport/TestThrottledInputStream.java
@@ -16,9 +16,10 @@
  */
 package org.apache.hadoop.hbase.io.hadoopbackport;
 
+import static org.junit.Assert.assertEquals;
+
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import static org.junit.Assert.assertEquals;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
index 9569ba85d8b..c53c9f52595 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
@@ -81,7 +81,7 @@ public class TestLRUDictionary {
     rand.nextBytes(testBytes);
 
     // Verify that our randomly generated array doesn't exist in the dictionary
-    assertEquals(testee.findEntry(testBytes, 0, testBytes.length), -1);
+    assertEquals(-1, testee.findEntry(testBytes, 0, testBytes.length));
 
     // now since we looked up an entry, we should have added it to the
     // dictionary, so it isn't empty
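Several hunks in this patch add @Override to methods of anonymous classes, such as the PrivilegedAction above. The annotation is a compile-time safety net: if the supertype method is ever renamed or its signature changes, the compiler rejects the stale override instead of silently treating it as an unrelated new method. A sketch with a hypothetical listener type (not from the patch):

    public class OverrideExample {
      interface Callback {
        void onDone(int code);
      }

      Callback cb = new Callback() {
        @Override // fails to compile if Callback.onDone ever changes shape
        public void onDone(int code) {
          System.out.println("done: " + code);
        }
      };
    }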
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
index 48922d9c9ce..3b724b1cf2c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.nio;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -102,10 +103,10 @@ public class TestMultiByteBuff {
   public void testPutPrimitives() {
     ByteBuffer bb = ByteBuffer.allocate(10);
     SingleByteBuff s = new SingleByteBuff(bb);
-    s.putLong(-4465109508325701663l);
+    s.putLong(-4465109508325701663L);
     bb.rewind();
     long long1 = bb.getLong();
-    assertEquals(long1, -4465109508325701663l);
+    assertEquals(-4465109508325701663L, long1);
     s.position(8);
   }
 
@@ -235,18 +236,18 @@ public class TestMultiByteBuff {
     multi.putLong(l2);
     multi.rewind();
     ByteBuffer sub = multi.asSubByteBuffer(Bytes.SIZEOF_LONG);
-    assertTrue(bb1 == sub);
+    assertEquals(bb1, sub);
     assertEquals(l1, ByteBufferUtils.toLong(sub, sub.position()));
     multi.skip(Bytes.SIZEOF_LONG);
     sub = multi.asSubByteBuffer(Bytes.SIZEOF_LONG);
-    assertFalse(bb1 == sub);
-    assertFalse(bb2 == sub);
+    assertNotEquals(bb1, sub);
+    assertNotEquals(bb2, sub);
     assertEquals(l2, ByteBufferUtils.toLong(sub, sub.position()));
     multi.rewind();
     ObjectIntPair<ByteBuffer> p = new ObjectIntPair<>();
     multi.asSubByteBuffer(8, Bytes.SIZEOF_LONG, p);
-    assertFalse(bb1 == p.getFirst());
-    assertFalse(bb2 == p.getFirst());
+    assertNotEquals(bb1, p.getFirst());
+    assertNotEquals(bb2, p.getFirst());
     assertEquals(0, p.getSecond());
     assertEquals(l2, ByteBufferUtils.toLong(sub, p.getSecond()));
   }
@@ -291,7 +292,7 @@ public class TestMultiByteBuff {
     bres[2] = mbb1.get(4);
     bres[3] = mbb1.get(5);
     int expected = Bytes.toInt(bres);
-    assertEquals(res, expected);
+    assertEquals(expected, res);
   }
 
   @Test
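The TestMultiByteBuff hunks make two related cleanups: the lowercase long-literal suffix 'l' becomes 'L' (error-prone flags it because 'l' is easily misread as the digit 1), and reference comparisons inside assertTrue/assertFalse become assertEquals/assertNotEquals so a failure reports both operands. A compact illustration (the buffers are invented for the sketch):

    import static org.junit.Assert.assertNotEquals;

    import java.nio.ByteBuffer;

    public class LiteralAndAssertExample {
      public void demo() {
        long v = -4465109508325701663L; // always 'L', never 'l'
        ByteBuffer a = ByteBuffer.allocate(8).putLong(0, v);
        ByteBuffer b = ByteBuffer.allocate(8);
        // assertFalse(a == b) only reports "expected false"; assertNotEquals
        // prints both operands when the assertion trips.
        assertNotEquals(a, b);
      }
    }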
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java
index 381d3df2bf8..eca6c472fba 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestCopyOnWriteMaps.java
@@ -18,18 +18,22 @@
 package org.apache.hadoop.hbase.types;
 
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 import java.util.Map;
 import java.util.concurrent.ConcurrentNavigableMap;
 import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.ThreadLocalRandom;
 
-import static org.junit.Assert.*;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestCopyOnWriteMaps {
@@ -258,8 +262,8 @@ public class TestCopyOnWriteMaps {
     long n = 100L;
     CopyOnWriteArrayMap<Long, Long> tm99 =
         (CopyOnWriteArrayMap<Long, Long>) m.tailMap(99L, false);
     for (Map.Entry<Long, Long> e:tm99.entrySet()) {
-      assertEquals(new Long(n), e.getKey());
-      assertEquals(new Long(n), e.getValue());
+      assertEquals(Long.valueOf(n), e.getKey());
+      assertEquals(Long.valueOf(n), e.getValue());
       n++;
     }
   }
@@ -277,15 +281,15 @@ public class TestCopyOnWriteMaps {
     long n = 102;
     CopyOnWriteArrayMap<Long, Long> tm102 =
         (CopyOnWriteArrayMap<Long, Long>) m.tailMap(102L, true);
     for (Map.Entry<Long, Long> e:tm102.entrySet()) {
-      assertEquals(new Long(n), e.getKey());
-      assertEquals(new Long(n), e.getValue());
+      assertEquals(Long.valueOf(n), e.getKey());
+      assertEquals(Long.valueOf(n), e.getValue());
       n++;
     }
     n = 99;
     CopyOnWriteArrayMap<Long, Long> tm98 =
         (CopyOnWriteArrayMap<Long, Long>) m.tailMap(98L, true);
     for (Map.Entry<Long, Long> e:tm98.entrySet()) {
-      assertEquals(new Long(n), e.getKey());
-      assertEquals(new Long(n), e.getValue());
+      assertEquals(Long.valueOf(n), e.getKey());
+      assertEquals(Long.valueOf(n), e.getValue());
       n++;
     }
   }
@@ -302,11 +306,11 @@ public class TestCopyOnWriteMaps {
 
     long n = 99;
     for (Map.Entry<Long, Long> e:m.entrySet()) {
-      assertEquals(new Long(n), e.getKey());
-      assertEquals(new Long(n), e.getValue());
+      assertEquals(Long.valueOf(n), e.getKey());
+      assertEquals(Long.valueOf(n), e.getValue());
       n++;
     }
     assertEquals(5, m.size());
-    assertEquals(false, m.isEmpty());
+    assertFalse(m.isEmpty());
   }
 }
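The TestCopyOnWriteMaps hunks swap the boxing constructor 'new Long(n)' for Long.valueOf(n), the findbugs DM_NUMBER_CTOR fix: the factory may serve cached instances (the JLS guarantees caching for -128..127) instead of allocating a fresh object every time, and the constructor is deprecated in newer JDKs. Sketch:

    public class BoxingExample {
      public static void main(String[] args) {
        // Always allocates a fresh object (deprecated in newer JDKs).
        Long a = new Long(42L);
        // May return a cached instance; -128..127 are guaranteed shared.
        Long b = Long.valueOf(42L);
        Long c = Long.valueOf(42L);
        System.out.println(b == c);      // true: same cached instance
        System.out.println(a.equals(b)); // value equality still holds
      }
    }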
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
index 4fcaaec8564..994948b93b0 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
 import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
@@ -60,20 +61,25 @@ public class TestStruct {
   @Parameters
   public static Collection<Object[]> params() {
     Object[][] pojo1Args = {
-        new Object[] { "foo", 5, 10.001 },
-        new Object[] { "foo", 100, 7.0 },
-        new Object[] { "foo", 100, 10.001 },
-        new Object[] { "bar", 5, 10.001 },
-        new Object[] { "bar", 100, 10.001 },
-        new Object[] { "baz", 5, 10.001 },
+      new Object[] { "foo", 5, 10.001 },
+      new Object[] { "foo", 100, 7.0 },
+      new Object[] { "foo", 100, 10.001 },
+      new Object[] { "bar", 5, 10.001 },
+      new Object[] { "bar", 100, 10.001 },
+      new Object[] { "baz", 5, 10.001 },
     };
 
     Object[][] pojo2Args = {
-        new Object[] { new byte[0], "it".getBytes(), "was", "the".getBytes() },
-        new Object[] { "best".getBytes(), new byte[0], "of", "times,".getBytes() },
-        new Object[] { "it".getBytes(), "was".getBytes(), "", "the".getBytes() },
-        new Object[] { "worst".getBytes(), "of".getBytes(), "times,", new byte[0] },
-        new Object[] { new byte[0], new byte[0], "", new byte[0] },
+      new Object[] { new byte[0], "it".getBytes(StandardCharsets.UTF_8), "was",
+        "the".getBytes(StandardCharsets.UTF_8) },
+      new Object[] { "best".getBytes(StandardCharsets.UTF_8), new byte[0], "of",
+        "times,".getBytes(StandardCharsets.UTF_8) },
+      new Object[] { "it".getBytes(StandardCharsets.UTF_8),
+        "was".getBytes(StandardCharsets.UTF_8), "",
+        "the".getBytes(StandardCharsets.UTF_8) },
+      new Object[] { "worst".getBytes(StandardCharsets.UTF_8),
+        "of".getBytes(StandardCharsets.UTF_8), "times,", new byte[0] },
+      new Object[] { new byte[0], new byte[0], "", new byte[0] },
     };
 
     Object[][] params = new Object[][] {
@@ -126,19 +132,55 @@ public class TestStruct {
     @Override
     public int compareTo(Pojo1 o) {
       int cmp = stringFieldAsc.compareTo(o.stringFieldAsc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       cmp = Integer.valueOf(intFieldAsc).compareTo(Integer.valueOf(o.intFieldAsc));
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       return Double.compare(doubleFieldAsc, o.doubleFieldAsc);
     }
 
     @Override
-    public boolean equals(Object o) {
-      if (this == o) return true;
-      if (null == o) return false;
-      if (!(o instanceof Pojo1)) return false;
-      Pojo1 that = (Pojo1) o;
-      return 0 == this.compareTo(that);
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      long temp;
+      temp = Double.doubleToLongBits(doubleFieldAsc);
+      result = prime * result + (int) (temp ^ (temp >>> 32));
+      result = prime * result + intFieldAsc;
+      result = prime * result + ((stringFieldAsc == null) ? 0 : stringFieldAsc.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      Pojo1 other = (Pojo1) obj;
+      if (Double.doubleToLongBits(doubleFieldAsc) !=
+          Double.doubleToLongBits(other.doubleFieldAsc)) {
+        return false;
+      }
+      if (intFieldAsc != other.intFieldAsc) {
+        return false;
+      }
+      if (stringFieldAsc == null) {
+        if (other.stringFieldAsc != null) {
+          return false;
+        }
+      } else if (!stringFieldAsc.equals(other.stringFieldAsc)) {
+        return false;
+      }
+      return true;
     }
   }
@@ -177,24 +219,69 @@ public class TestStruct {
     @Override
     public int compareTo(Pojo2 o) {
       int cmp = NULL_SAFE_BYTES_COMPARATOR.compare(byteField1Asc, o.byteField1Asc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       cmp = -NULL_SAFE_BYTES_COMPARATOR.compare(byteField2Dsc, o.byteField2Dsc);
-      if (cmp != 0) return cmp;
-      if (stringFieldDsc == o.stringFieldDsc) cmp = 0;
-      else if (null == stringFieldDsc) cmp = 1;
-      else if (null == o.stringFieldDsc) cmp = -1;
+      if (cmp != 0) {
+        return cmp;
+      }
+      if (null == stringFieldDsc) {
+        cmp = 1;
+      }
+      else if (null == o.stringFieldDsc) {
+        cmp = -1;
+      }
+      else if (stringFieldDsc.equals(o.stringFieldDsc)) {
+        cmp = 0;
+      }
       else cmp = -stringFieldDsc.compareTo(o.stringFieldDsc);
-      if (cmp != 0) return cmp;
+      if (cmp != 0) {
+        return cmp;
+      }
       return -NULL_SAFE_BYTES_COMPARATOR.compare(byteField3Dsc, o.byteField3Dsc);
     }
 
     @Override
-    public boolean equals(Object o) {
-      if (this == o) return true;
-      if (null == o) return false;
-      if (!(o instanceof Pojo2)) return false;
-      Pojo2 that = (Pojo2) o;
-      return 0 == this.compareTo(that);
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + Arrays.hashCode(byteField1Asc);
+      result = prime * result + Arrays.hashCode(byteField2Dsc);
+      result = prime * result + Arrays.hashCode(byteField3Dsc);
+      result = prime * result + ((stringFieldDsc == null) ? 0 : stringFieldDsc.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      Pojo2 other = (Pojo2) obj;
+      if (!Arrays.equals(byteField1Asc, other.byteField1Asc)) {
+        return false;
+      }
+      if (!Arrays.equals(byteField2Dsc, other.byteField2Dsc)) {
+        return false;
+      }
+      if (!Arrays.equals(byteField3Dsc, other.byteField3Dsc)) {
+        return false;
+      }
+      if (stringFieldDsc == null) {
+        if (other.stringFieldDsc != null) {
+          return false;
+        }
+      } else if (!stringFieldDsc.equals(other.stringFieldDsc)) {
+        return false;
+      }
+      return true;
     }
   }
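Pojo1 and Pojo2 previously defined equals without hashCode, the findbugs HE_EQUALS_NO_HASHCODE pattern these hunks fix: objects that compare equal must report equal hash codes, or they misbehave in HashMap and HashSet. A minimal pairing in the same spirit (the Point class and its fields are invented for illustration):

    import java.util.Objects;

    public class Point {
      private final int x;
      private final int y;

      public Point(int x, int y) {
        this.x = x;
        this.y = y;
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) {
          return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
          return false;
        }
        Point other = (Point) obj;
        return x == other.x && y == other.y;
      }

      @Override
      public int hashCode() {
        // Must be consistent with equals: equal points hash identically.
        // Objects.hash is a compact alternative to the prime-accumulator
        // style the patch generates.
        return Objects.hash(x, y);
      }
    }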
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/AbstractHBaseToolTest.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/AbstractHBaseToolTest.java
index 79dafe79671..96181114dcf 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/AbstractHBaseToolTest.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/AbstractHBaseToolTest.java
@@ -17,19 +17,22 @@
 package org.apache.hadoop.hbase.util;
 
+import static org.apache.hadoop.hbase.util.AbstractHBaseTool.EXIT_FAILURE;
+import static org.apache.hadoop.hbase.util.AbstractHBaseTool.EXIT_SUCCESS;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.hadoop.hbase.util.AbstractHBaseTool.EXIT_FAILURE;
-import static org.apache.hadoop.hbase.util.AbstractHBaseTool.EXIT_SUCCESS;
-import static org.junit.Assert.*;
-
 public class AbstractHBaseToolTest {
   static final class Options {
     static final Option REQUIRED = new Option(null, "required", true, "");
@@ -43,7 +46,7 @@ public class AbstractHBaseToolTest {
    * 2 deprecated options to test backward compatibility: -opt (old version of --optional) and
    * -bool (old version of --boolean).
    */
-  private class TestTool extends AbstractHBaseTool {
+  private static class TestTool extends AbstractHBaseTool {
     String requiredValue;
     String optionalValue;
     boolean booleanValue;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index ba6cea07144..17471bb64db 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -23,13 +23,13 @@ import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
-import java.io.FileWriter;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
 import java.util.jar.Manifest;
-
 import javax.tools.JavaCompiler;
 import javax.tools.JavaFileObject;
 import javax.tools.StandardJavaFileManager;
@@ -127,7 +127,7 @@ public class ClassLoaderTestHelper {
     File srcDirPath = new File(srcDir.toString());
     srcDirPath.mkdirs();
     File sourceCodeFile = new File(srcDir.toString(), className + ".java");
-    BufferedWriter bw = new BufferedWriter(new FileWriter(sourceCodeFile));
+    BufferedWriter bw = Files.newBufferedWriter(sourceCodeFile.toPath(), StandardCharsets.UTF_8);
     bw.write(javaCode);
     bw.close();
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
index 6835c985c09..9cca3122ea3 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
@@ -25,13 +25,13 @@ import java.util.Map;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes;
 
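The ClassLoaderTestHelper hunk replaces FileWriter, which always writes in the platform-default encoding, with Files.newBufferedWriter and an explicit charset. A sketch of the same pattern wrapped in try-with-resources (the resource management is an addition for the sketch; the patch itself keeps the explicit close(), and the path below is illustrative):

    import java.io.BufferedWriter;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class WriterExample {
      public static void write(String javaCode) throws IOException {
        // Explicit UTF-8; try-with-resources closes the writer even on failure.
        try (BufferedWriter bw = Files.newBufferedWriter(
            Paths.get("/tmp/Example.java"), StandardCharsets.UTF_8)) {
          bw.write(javaCode);
        }
      }
    }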
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
index 554e10803af..6c4e08eff0a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
@@ -17,13 +17,18 @@
 package org.apache.hadoop.hbase.util;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.util.Random;
 import java.util.TreeMap;
 
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.util.AvlUtil.AvlKeyComparator;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList;
+import org.apache.hadoop.hbase.util.AvlUtil.AvlKeyComparator;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlNode;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlNodeVisitor;
@@ -32,13 +37,6 @@ import org.apache.hadoop.hbase.util.AvlUtil.AvlTreeIterator;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestAvlUtil {
   private static final TestAvlKeyComparator KEY_COMPARATOR = new TestAvlKeyComparator();
@@ -97,6 +95,7 @@ public class TestAvlUtil {
     AvlTree.visit(root, new AvlNodeVisitor<TestAvlNode>() {
       private int prevKey = -1;
 
+      @Override
       public boolean visitNode(TestAvlNode node) {
         assertEquals(prevKey, node.getKey() - 1);
         assertTrue(node.getKey() >= MIN_KEY);
@@ -254,6 +253,7 @@ public class TestAvlUtil {
   }
 
   private static class TestAvlKeyComparator implements AvlKeyComparator<TestAvlNode> {
+    @Override
     public int compareKey(TestAvlNode node, Object key) {
       return node.getKey() - (int)key;
     }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
index 7c74bca7f96..e6098049e80 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
@@ -24,7 +24,6 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import junit.framework.TestCase;
-
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.experimental.categories.Category;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
index 4b879455923..9ee356f56ae 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
@@ -64,9 +64,9 @@ public class TestByteBufferArray {
     assertEquals(119, array.buffers.length);
     for (int i = 0; i < array.buffers.length; i++) {
       if (i == array.buffers.length - 1) {
-        assertEquals(array.buffers[i].capacity(), 0);
+        assertEquals(0, array.buffers[i].capacity());
       } else {
-        assertEquals(array.buffers[i].capacity(), ByteBufferArray.DEFAULT_BUFFER_SIZE);
+        assertEquals(ByteBufferArray.DEFAULT_BUFFER_SIZE, array.buffers[i].capacity());
       }
     }
   }
@@ -86,9 +86,9 @@ public class TestByteBufferArray {
     array.createBuffers(allocator);
     for (int i = 0; i < array.buffers.length; i++) {
       if (i == array.buffers.length - 1) {
-        assertEquals(array.buffers[i].capacity(), 0);
+        assertEquals(0, array.buffers[i].capacity());
       } else {
-        assertEquals(array.buffers[i].capacity(), 458752);
+        assertEquals(458752, array.buffers[i].capacity());
      }
     }
   }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
index 81923cadd11..f4687fa5deb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
@@ -313,8 +313,8 @@ public class TestByteBufferUtils {
   public void testLongFitsIn() {
     assertEquals(1, ByteBufferUtils.longFitsIn(0));
     assertEquals(1, ByteBufferUtils.longFitsIn(1));
-    assertEquals(3, ByteBufferUtils.longFitsIn(1l << 16));
-    assertEquals(5, ByteBufferUtils.longFitsIn(1l << 32));
+    assertEquals(3, ByteBufferUtils.longFitsIn(1L << 16));
+    assertEquals(5, ByteBufferUtils.longFitsIn(1L << 32));
     assertEquals(8, ByteBufferUtils.longFitsIn(-1));
     assertEquals(8, ByteBufferUtils.longFitsIn(Long.MIN_VALUE));
     assertEquals(8, ByteBufferUtils.longFitsIn(Long.MAX_VALUE));
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
index a3609e3e4f3..297bd62147f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Assert;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index 38b01b82c45..546840c114c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -30,7 +30,6 @@ import java.util.List;
 import java.util.Random;
 
 import junit.framework.TestCase;
-
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.io.WritableUtils;
@@ -158,9 +157,9 @@ public class TestBytes extends TestCase {
     float [] floats = {-1f, 123.123f, Float.MAX_VALUE};
     for (int i = 0; i < floats.length; i++) {
       byte [] b = Bytes.toBytes(floats[i]);
-      assertEquals(floats[i], Bytes.toFloat(b));
+      assertEquals(floats[i], Bytes.toFloat(b), 0.0f);
       byte [] b2 = bytesWithOffset(b);
-      assertEquals(floats[i], Bytes.toFloat(b2, 1));
+      assertEquals(floats[i], Bytes.toFloat(b2, 1), 0.0f);
     }
   }
 
@@ -168,9 +167,9 @@ public class TestBytes extends TestCase {
     double [] doubles = {Double.MIN_VALUE, Double.MAX_VALUE};
     for (int i = 0; i < doubles.length; i++) {
       byte [] b = Bytes.toBytes(doubles[i]);
-      assertEquals(doubles[i], Bytes.toDouble(b));
+      assertEquals(doubles[i], Bytes.toDouble(b), 0.0);
       byte [] b2 = bytesWithOffset(b);
-      assertEquals(doubles[i], Bytes.toDouble(b2, 1));
+      assertEquals(doubles[i], Bytes.toDouble(b2, 1), 0.0);
     }
   }
 
@@ -419,18 +418,18 @@ public class TestBytes extends TestCase {
 
   public void testToStringBinary_toBytesBinary_Reversable() throws Exception {
     String bytes = Bytes.toStringBinary(Bytes.toBytes(2.17));
-    assertEquals(2.17, Bytes.toDouble(Bytes.toBytesBinary(bytes)), 0);    
+    assertEquals(2.17, Bytes.toDouble(Bytes.toBytesBinary(bytes)), 0);
   }
 
   public void testUnsignedBinarySearch(){
     byte[] bytes = new byte[]{0,5,123,127,-128,-100,-1};
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5), 1);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127), 3);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128), 4);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-100), 5);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1), 6);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2), -1-1);
-    Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5), -6-1);
+    Assert.assertEquals(1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5));
+    Assert.assertEquals(3, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127));
+    Assert.assertEquals(4, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128));
+    Assert.assertEquals(5, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-100));
+    Assert.assertEquals(6, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1));
+    Assert.assertEquals(-1-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2));
+    Assert.assertEquals(-6-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5));
   }
 
   public void testUnsignedIncrement(){
@@ -448,7 +447,7 @@ public class TestBytes extends TestCase {
     int c2 = Bytes.toInt(Bytes.unsignedCopyAndIncrement(c), 0);
     Assert.assertTrue(c2==256);
   }
-  
+
   public void testIndexOf() {
     byte[] array = Bytes.toBytes("hello");
     assertEquals(1, Bytes.indexOf(array, (byte) 'e'));
@@ -458,7 +457,7 @@ public class TestBytes extends TestCase {
     assertEquals(2, Bytes.indexOf(array, Bytes.toBytes("ll")));
     assertEquals(-1, Bytes.indexOf(array, Bytes.toBytes("hll")));
   }
-  
+
  public void testContains() {
     byte[] array = Bytes.toBytes("hello world");
     assertTrue(Bytes.contains(array, (byte) 'e'));
@@ -468,7 +467,7 @@ public class TestBytes extends TestCase {
     assertTrue(Bytes.contains(array, Bytes.toBytes("ello")));
     assertFalse(Bytes.contains(array, Bytes.toBytes("owo")));
   }
-  
+
   public void testZero() {
     byte[] array = Bytes.toBytes("hello");
     Bytes.zero(array);
@@ -496,7 +495,7 @@ public class TestBytes extends TestCase {
       Assert.assertEquals(i, b[i]);
     }
   }
-  
+
   public void testToFromHex() {
     List<String> testStrings = new ArrayList<>(8);
     testStrings.addAll(Arrays.asList(new String[] {
@@ -516,7 +515,7 @@ public class TestBytes extends TestCase {
       String result = Bytes.toHex(byteData);
       Assert.assertTrue(testString.equalsIgnoreCase(result));
     }
-    
+
     List<byte[]> testByteData = new ArrayList<>(5);
     testByteData.addAll(Arrays.asList(new byte[][] {
       new byte[0],
@@ -528,12 +527,11 @@ public class TestBytes extends TestCase {
 
     Random r = new Random();
     for (int i = 0; i < 20; i++) {
-
       byte[] bytes = new byte[r.nextInt(100)];
       r.nextBytes(bytes);
       testByteData.add(bytes);
     }
-    
+
     for (byte[] testData : testByteData) {
       String hexString = Bytes.toHex(testData);
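The TestBytes hunks add the tolerance argument that JUnit requires for floating-point assertEquals: the two-argument double/float overload is deprecated because bitwise equality is rarely what a computed value needs, so the test must state its tolerance (0.0 here, since Bytes round-trips exactly). A standalone sketch of the pitfall:

    import static org.junit.Assert.assertEquals;

    public class DeltaExample {
      public void demo() {
        double computed = 0.1 + 0.2; // == 0.30000000000000004, not 0.3
        // assertEquals(0.3, computed, 0.0) would fail on exact comparison;
        // an explicit delta states how close is close enough.
        assertEquals(0.3, computed, 1e-9);
      }
    }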
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
index 7ff579277ce..7dd27d48048 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java
@@ -100,10 +100,10 @@ public class TestCommonFSUtils {
     Path root = new Path("file:///hbase/root");
     Path walRoot = new Path("file:///hbase/logroot");
     CommonFSUtils.setRootDir(conf, root);
-    assertEquals(CommonFSUtils.getRootDir(conf), root);
-    assertEquals(CommonFSUtils.getWALRootDir(conf), root);
+    assertEquals(root, CommonFSUtils.getRootDir(conf));
+    assertEquals(root, CommonFSUtils.getWALRootDir(conf));
     CommonFSUtils.setWALRootDir(conf, walRoot);
-    assertEquals(CommonFSUtils.getWALRootDir(conf), walRoot);
+    assertEquals(walRoot, CommonFSUtils.getWALRootDir(conf));
   }
 
   @Test(expected=IllegalStateException.class)
@@ -120,12 +120,12 @@ public class TestCommonFSUtils {
     CommonFSUtils.setRootDir(conf, new Path("file:///user/hbase"));
     Path testFile = new Path(CommonFSUtils.getRootDir(conf), "test/testfile");
     Path tmpFile = new Path("file:///test/testfile");
-    assertEquals(CommonFSUtils.removeWALRootPath(testFile, conf), "test/testfile");
-    assertEquals(CommonFSUtils.removeWALRootPath(tmpFile, conf), tmpFile.toString());
+    assertEquals("test/testfile", CommonFSUtils.removeWALRootPath(testFile, conf));
+    assertEquals(tmpFile.toString(), CommonFSUtils.removeWALRootPath(tmpFile, conf));
     CommonFSUtils.setWALRootDir(conf, new Path("file:///user/hbaseLogDir"));
-    assertEquals(CommonFSUtils.removeWALRootPath(testFile, conf), testFile.toString());
+    assertEquals(testFile.toString(), CommonFSUtils.removeWALRootPath(testFile, conf));
     Path logFile = new Path(CommonFSUtils.getWALRootDir(conf), "test/testlog");
-    assertEquals(CommonFSUtils.removeWALRootPath(logFile, conf), "test/testlog");
+    assertEquals("test/testlog", CommonFSUtils.removeWALRootPath(logFile, conf));
   }
 
   @Test(expected=NullPointerException.class)
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
index 08d5569b0b4..18670a1015e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
@@ -119,6 +119,8 @@ public class TestConcatenatedLists {
   }
 
   @SuppressWarnings("ModifyingCollectionWithItself")
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="DMI_VACUOUS_SELF_COLLECTION_CALL",
+      justification="Intended vacuous containsAll call on 'c'")
   private void verify(ConcatenatedLists<Long> c, int last) {
     assertEquals((last == -1), c.isEmpty());
     assertEquals(last + 1, c.size());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
index 967d272280f..32af01bfe1f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java
@@ -18,10 +18,10 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 import java.io.File;
@@ -87,8 +87,11 @@ public class TestCoprocessorClassLoader {
   private void checkingLibJarName(String jarName, String libPrefix) throws Exception {
     File tmpFolder = new File(ClassLoaderTestHelper.localDirPath(conf), "tmp");
     if (tmpFolder.exists()) { // Clean up the tmp folder
-      for (File f: tmpFolder.listFiles()) {
-        f.delete();
+      File[] files = tmpFolder.listFiles();
+      if (files != null) {
+        for (File f: files) {
+          f.delete();
+        }
       }
     }
     String className = "CheckingLibJarName";
@@ -102,10 +105,13 @@ public class TestCoprocessorClassLoader {
     ClassLoader classLoader = CoprocessorClassLoader.getClassLoader(path, parent, "112", conf);
     assertNotNull("Classloader should be created", classLoader);
     String fileToLookFor = "." + className + ".jar";
-    for (String f: tmpFolder.list()) {
-      if (f.endsWith(fileToLookFor) && f.contains(jarName)) {
-        // Cool, found it;
-        return;
+    String[] files = tmpFolder.list();
+    if (files != null) {
+      for (String f: files) {
+        if (f.endsWith(fileToLookFor) && f.contains(jarName)) {
+          // Cool, found it;
+          return;
+        }
       }
     }
     fail("Could not find the expected lib jar file");
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
index 57514e37917..5bdb668a2d2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
 import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 /**
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java
index 96b3da08ca7..54de497d656 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java
@@ -23,11 +23,12 @@ import static org.junit.Assert.assertEquals;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+
 @Category(SmallTests.class)
 public class TestJRubyFormat {
   @Test
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index f5db4c17f8e..2c403423b67 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
@@ -40,8 +41,8 @@ public class TestLoadTestKVGenerator {
   @Test
   public void testValueLength() {
     for (int i = 0; i < 1000; ++i) {
-      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(),
-        String.valueOf(rand.nextInt()).getBytes());
+      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(StandardCharsets.UTF_8),
+        String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8));
       assertTrue(MIN_LEN <= v.length);
       assertTrue(v.length <= MAX_LEN);
     }
@@ -51,8 +52,8 @@ public class TestLoadTestKVGenerator {
   public void testVerification() {
     for (int i = 0; i < 1000; ++i) {
       for (int qualIndex = 0; qualIndex < 20; ++qualIndex) {
-        byte[] qual = String.valueOf(qualIndex).getBytes();
-        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
+        byte[] qual = String.valueOf(qualIndex).getBytes(StandardCharsets.UTF_8);
+        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(StandardCharsets.UTF_8);
         byte[] v = gen.generateRandomSizeValue(rowKey, qual);
         assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual));
         v[0]++;
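The TestCoprocessorClassLoader hunks above guard File.listFiles() and File.list(), which return null rather than an empty array when the path is not a directory or cannot be read; findbugs reports the unguarded loop as a possible null dereference (NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE). The guard in isolation:

    import java.io.File;

    public class ListFilesExample {
      public static void cleanDir(File dir) {
        File[] files = dir.listFiles();
        // listFiles() returns null on I/O error or a non-directory path,
        // so iterate only after the null check.
        if (files != null) {
          for (File f : files) {
            f.delete();
          }
        }
      }
    }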
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
index dc0690ad779..c62986d86d7 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
@@ -19,9 +19,11 @@ package org.apache.hadoop.hbase.util;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 
@@ -872,24 +874,32 @@ public class TestOrderedBytes {
   @Test
   public void testBlobVar() {
     byte[][] vals =
-      { "".getBytes(), "foo".getBytes(), "foobarbazbub".getBytes(),
-        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
-          (byte) 0xaa, /* 7 bytes of alternating bits; testing around HBASE-9893 */ },
-        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
-          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa },
-        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
-          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
-          (byte) 0xaa, (byte) 0xaa, /* 14 bytes of alternating bits; testing around HBASE-9893 */ },
-        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
-          (byte) 0x55, /* 7 bytes of alternating bits; testing around HBASE-9893 */ },
-        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
-          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55 },
-        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
-          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
-          (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing around HBASE-9893 */ },
-        "1".getBytes(), "22".getBytes(), "333".getBytes(), "4444".getBytes(),
-        "55555".getBytes(), "666666".getBytes(), "7777777".getBytes(), "88888888".getBytes()
-      };
+      { "".getBytes(StandardCharsets.UTF_8),
+        "foo".getBytes(StandardCharsets.UTF_8),
+        "foobarbazbub".getBytes(StandardCharsets.UTF_8),
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
+          (byte) 0xaa, /* 7 bytes of alternating bits; testing around HBASE-9893 */ },
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa },
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, /* 14 bytes of alternating bits; testing around HBASE-9893 */ },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
+          (byte) 0x55, /* 7 bytes of alternating bits; testing around HBASE-9893 */ },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
+          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55 },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
+          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
+          (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing around HBASE-9893 */ },
+        "1".getBytes(StandardCharsets.UTF_8),
+        "22".getBytes(StandardCharsets.UTF_8),
+        "333".getBytes(StandardCharsets.UTF_8),
+        "4444".getBytes(StandardCharsets.UTF_8),
+        "55555".getBytes(StandardCharsets.UTF_8),
+        "666666".getBytes(StandardCharsets.UTF_8),
+        "7777777".getBytes(StandardCharsets.UTF_8),
+        "88888888".getBytes(StandardCharsets.UTF_8)
+      };
 
     /*
      * assert encoded values match decoded values. encode into target buffer
      */
@@ -959,7 +969,9 @@ public class TestOrderedBytes {
   @Test
   public void testBlobCopy() {
     byte[][] vals =
-      { "".getBytes(), "foo".getBytes(), "foobarbazbub".getBytes(),
+      { "".getBytes(StandardCharsets.UTF_8),
+        "foo".getBytes(StandardCharsets.UTF_8),
+        "foobarbazbub".getBytes(StandardCharsets.UTF_8),
         { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
           (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa },
         { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
@@ -1034,9 +1046,9 @@ public class TestOrderedBytes {
       byte[] a = new byte[3 + (Order.ASCENDING == ord ? 1 : 2) + 2];
       PositionedByteRange buf =
           new SimplePositionedMutableByteRange(a, 1, 3 + (Order.ASCENDING == ord ? 1 : 2));
-      OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(), 3, 3, ord);
+      OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(StandardCharsets.UTF_8), 3, 3, ord);
       buf.setPosition(0);
-      assertArrayEquals("bar".getBytes(), OrderedBytes.decodeBlobCopy(buf));
+      assertArrayEquals("bar".getBytes(StandardCharsets.UTF_8), OrderedBytes.decodeBlobCopy(buf));
     }
   }
@@ -1240,7 +1252,7 @@ public class TestOrderedBytes {
     buff.setPosition(0);
     assertEquals(OrderedBytes.length(buff), cnt);
     for (int i = 0; i < cnt; i++) {
-      assertEquals(OrderedBytes.isEncodedValue(buff), true);
+      assertTrue(OrderedBytes.isEncodedValue(buff));
       OrderedBytes.skip(buff);
     }
   }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestVersionInfo.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestVersionInfo.java
index 42f6ebae3b0..6e71b275f42 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestVersionInfo.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestVersionInfo.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
index 216fe0cf700..8309f81f455 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.Properties;
 
@@ -28,9 +31,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestZKConfig {