HBASE-27206 Clean up error-prone findings in hbase-common (#4645)

Signed-off-by: Nick Dimiduk <ndimiduk@apache.org>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Andrew Purtell 2022-08-21 06:04:46 -07:00 committed by GitHub
parent 35fb37cf86
commit fbe3b90e0c
94 changed files with 577 additions and 698 deletions

View File

@ -264,7 +264,7 @@ public class ByteBufferKeyValue extends ByteBufferExtendedCell {
if (this.buf.hasArray()) {
return ClassSize.align(FIXED_OVERHEAD + length);
}
return ClassSize.align(FIXED_OVERHEAD) + this.getSerializedSize();
return (long) ClassSize.align(FIXED_OVERHEAD) + this.getSerializedSize();
}
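The added (long) cast matters because ClassSize.align(FIXED_OVERHEAD) and getSerializedSize() are both ints, so without widening the addition is performed in 32-bit arithmetic and can overflow before the result is converted to the long return type. A minimal sketch of the hazard, with made-up sizes (not taken from the patch):

int aligned = 2_000_000_000;                 // hypothetical aligned overhead
int serialized = 2_000_000_000;              // hypothetical serialized size
long wrong = aligned + serialized;           // int addition overflows first: -294967296
long right = (long) aligned + serialized;    // widened before adding: 4000000000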
@Override

View File

@ -106,15 +106,15 @@ public interface Cell extends HeapSize {
// 4) Timestamp
/**
* @return Long value representing time at which this cell was "Put" into the row. Typically
* represents the time of insertion, but can be any value from 0 to Long.MAX_VALUE.
* Return a long value representing time at which this cell was "Put" into the row. Typically
* represents the time of insertion, but can be any value from 0 to Long.MAX_VALUE.
*/
long getTimestamp();
// 5) Type
/**
* @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
* Return the byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}.
*/
@Deprecated
@ -160,7 +160,7 @@ public interface Cell extends HeapSize {
byte[] getTagsArray();
/**
* @return the first offset where the tags start in the Cell
* Return the first offset where the tags start in the Cell
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are are now internal.
*/
@Deprecated

View File

@ -95,6 +95,7 @@ public interface CellComparator extends Comparator<Cell>, Serializable {
}
/**
* Lexicographically compare two rows
* @param row ByteBuffer that wraps a row; will read from current position and will reading all
* remaining; will not disturb the ByteBuffer internal state.
* @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
@ -111,7 +112,7 @@ public interface CellComparator extends Comparator<Cell>, Serializable {
}
/**
* Lexographically compares the two cells excluding the row part. It compares family, qualifier,
* Lexicographically compares the two cells excluding the row part. It compares family, qualifier,
* timestamp and the type
* @param leftCell the left hand side cell
* @param rightCell the right hand side cell
@ -121,7 +122,7 @@ public interface CellComparator extends Comparator<Cell>, Serializable {
int compareWithoutRow(Cell leftCell, Cell rightCell);
/**
* Lexographically compares the families of the two cells
* Lexicographically compares the families of the two cells
* @param leftCell the left hand side cell
* @param rightCell the right hand side cell
* @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
@ -130,7 +131,7 @@ public interface CellComparator extends Comparator<Cell>, Serializable {
int compareFamilies(Cell leftCell, Cell rightCell);
/**
* Lexographically compares the qualifiers of the two cells
* Lexicographically compares the qualifiers of the two cells
* @param leftCell the left hand side cell
* @param rightCell the right hand side cell
* @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both
@ -163,10 +164,9 @@ public interface CellComparator extends Comparator<Cell>, Serializable {
int compareTimestamps(long leftCellts, long rightCellts);
/**
* @return A dumbed-down, fast comparator for hbase2 base-type, the {@link ByteBufferKeyValue}.
* Create an instance when you make a new memstore, when you know only BBKVs will be
* passed. Do not pollute with types other than BBKV if can be helped; the Comparator will
* slow.
* Return a dumbed-down, fast comparator for hbase2 base-type, the {@link ByteBufferKeyValue}.
* Create an instance when you make a new memstore, when you know only BBKVs will be passed. Do
* not pollute with types other than BBKV if can be helped; the Comparator will slow.
*/
Comparator getSimpleComparator();
Comparator<Cell> getSimpleComparator();
}

View File

@ -18,7 +18,6 @@
package org.apache.hadoop.hbase;
import java.util.Comparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
@ -691,11 +690,11 @@ public class CellComparatorImpl implements CellComparator {
int rFamLength = right.getFamilyLength();
int lQualLength = left.getQualifierLength();
int rQualLength = right.getQualifierLength();
if (lFamLength + lQualLength == 0 && left.getTypeByte() == Type.Minimum.getCode()) {
if (lFamLength + lQualLength == 0 && left.getTypeByte() == KeyValue.Type.Minimum.getCode()) {
// left is "bigger", i.e. it appears later in the sorted order
return 1;
}
if (rFamLength + rQualLength == 0 && right.getTypeByte() == Type.Minimum.getCode()) {
if (rFamLength + rQualLength == 0 && right.getTypeByte() == KeyValue.Type.Minimum.getCode()) {
return -1;
}
if (lFamLength != rFamLength) {

View File

@ -30,7 +30,6 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.function.Function;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
@ -486,8 +485,7 @@ public final class CellUtil {
}
/**
* Finds if the qualifier part of the cell and the KV serialized byte[] are equal n * @param buf
* the serialized keyvalue format byte[]
* Finds if the qualifier part of the cell and the KV serialized byte[] are equal.
* @return true if the qualifier matches, false otherwise
*/
public static boolean matchingQualifier(final Cell left, final byte[] buf) {
@ -561,8 +559,8 @@ public final class CellUtil {
}
/**
* @return True if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily}
* or a {@link KeyValue.Type#DeleteColumn} KeyValue type.
* Return true if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily}
* or a {@link KeyValue.Type#DeleteColumn} KeyValue type.
*/
@SuppressWarnings("deprecation")
public static boolean isDelete(final Cell cell) {
@ -572,13 +570,13 @@ public final class CellUtil {
/** Returns True if this cell is a Put. */
@SuppressWarnings("deprecation")
public static boolean isPut(Cell cell) {
return cell.getTypeByte() == Type.Put.getCode();
return cell.getTypeByte() == KeyValue.Type.Put.getCode();
}
/**
* Sets the given timestamp to the cell. Note that this method is a LimitedPrivate API and may
* change between minor releases. nn * @throws IOException when the passed cell is not of type
* {@link ExtendedCell}
* change between minor releases.
* @throws IOException when the passed cell is not of type {@link ExtendedCell}
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
public static void setTimestamp(Cell cell, long ts) throws IOException {
@ -587,8 +585,7 @@ public final class CellUtil {
/**
* Sets the given timestamp to the cell. Note that this method is a LimitedPrivate API and may
* change between minor releases. n * @param ts buffer containing the timestamp value
* @param tsOffset offset to the new timestamp
* change between minor releases.
* @throws IOException when the passed cell is not of type {@link ExtendedCell}
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@ -603,6 +600,7 @@ public final class CellUtil {
}
/**
* Return the Key portion of the passed <code>cell</code> as a String.
* @param cell the cell to convert
* @param rowConverter used to convert the row of the cell to a string
* @return The Key portion of the passed <code>cell</code> as a String.
@ -623,7 +621,7 @@ public final class CellUtil {
sb.append('/');
sb.append(KeyValue.humanReadableTimestamp(cell.getTimestamp()));
sb.append('/');
sb.append(Type.codeToType(cell.getTypeByte()));
sb.append(KeyValue.Type.codeToType(cell.getTypeByte()));
if (!(cell instanceof KeyValue.KeyOnlyKeyValue)) {
sb.append("/vlen=");
sb.append(cell.getValueLength());
@ -676,15 +674,14 @@ public final class CellUtil {
return CellComparator.getInstance().compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0;
}
/**
* Compares the row of two keyvalues for equality nn * @return True if rows match.
*/
/** Compares the row of two keyvalues for equality */
public static boolean matchingRows(final Cell left, final Cell right) {
short lrowlength = left.getRowLength();
short rrowlength = right.getRowLength();
return matchingRows(left, lrowlength, right, rrowlength);
}
/** Compares the row of two keyvalues for equality */
public static boolean matchingRows(final Cell left, final short lrowlength, final Cell right,
final short rrowlength) {
if (lrowlength != rrowlength) return false;
@ -708,10 +705,7 @@ public final class CellUtil {
right.getRowOffset(), rrowlength);
}
/**
* Compares the row and column of two keyvalues for equality nn * @return True if same row and
* column.
*/
/** Compares the row and column of two keyvalues for equality */
public static boolean matchingRowColumn(final Cell left, final Cell right) {
short lrowlength = left.getRowLength();
short rrowlength = right.getRowLength();
@ -738,6 +732,7 @@ public final class CellUtil {
return matchingColumn(left, lfamlength, lqlength, right, rfamlength, rqlength);
}
/** Compares the row and column of two keyvalues for equality */
public static boolean matchingRowColumnBytes(final Cell left, final Cell right) {
int lrowlength = left.getRowLength();
int rrowlength = right.getRowLength();

View File

@ -134,6 +134,7 @@ public class ChoreService {
}
/**
* Schedule a chore.
* @param chore Chore to be scheduled. If the chore is already scheduled with another ChoreService
* instance, that schedule will be cancelled (i.e. a Chore can only ever be scheduled
* with a single ChoreService instance).
@ -252,8 +253,8 @@ public class ChoreService {
}
/**
* @return number of chores that this service currently has scheduled that are missing their
* scheduled start time
* Return number of chores that this service currently has scheduled that are missing their
* scheduled start time
*/
int getNumberOfChoresMissingStartTime() {
return choresMissingStartTime.size();
@ -273,9 +274,6 @@ public class ChoreService {
private final static String THREAD_NAME_SUFFIX = ".Chore.";
private AtomicInteger threadNumber = new AtomicInteger(1);
/**
* @param threadPrefix The prefix given to all threads created by this factory
*/
public ChoreServiceThreadFactory(final String threadPrefix) {
this.threadPrefix = threadPrefix;
}
@ -350,9 +348,9 @@ public class ChoreService {
}
/**
* shutdown the service. Any chores that are scheduled for execution will be cancelled. Any chores
* in the middle of execution will be interrupted and shutdown. This service will be unusable
* after this method has been called (i.e. future scheduling attempts will fail).
* Shut down the service. Any chores that are scheduled for execution will be cancelled. Any
* chores in the middle of execution will be interrupted and shutdown. This service will be
* unusable after this method has been called (i.e. future scheduling attempts will fail).
* <p/>
* Notice that, this will only clean the chore from this ChoreService but you could still schedule
* the chore with other ChoreService.
@ -390,9 +388,7 @@ public class ChoreService {
}
}
/**
* Prints a summary of important details about the chore. Used for debugging purposes
*/
/** Prints a summary of important details about the chore. Used for debugging purposes */
private void printChoreDetails(final String header, ScheduledChore chore) {
if (!LOG.isTraceEnabled()) {
return;
@ -408,9 +404,7 @@ public class ChoreService {
}
}
/**
* Prints a summary of important details about the service. Used for debugging purposes
*/
/** Prints a summary of important details about the service. Used for debugging purposes */
private void printChoreServiceDetails(final String header) {
if (!LOG.isTraceEnabled()) {
return;

View File

@ -105,6 +105,7 @@ public class HBaseConfiguration extends Configuration {
}
/**
* Creates a Configuration with HBase resources
* @param that Configuration to clone.
* @return a Configuration created with the hbase-*.xml files plus the given configuration.
*/

View File

@ -86,9 +86,7 @@ public class HDFSBlocksDistribution {
return weightForSsd;
}
/**
* comparator used to sort hosts based on weight
*/
/** Comparator used to sort hosts based on weight */
public static class WeightComparator implements Comparator<HostAndWeight> {
@Override
public int compare(HostAndWeight l, HostAndWeight r) {
@ -100,16 +98,10 @@ public class HDFSBlocksDistribution {
}
}
/**
* Constructor
*/
public HDFSBlocksDistribution() {
this.hostAndWeights = new TreeMap<>();
}
/**
* @see java.lang.Object#toString()
*/
@Override
public synchronized String toString() {
return "number of unique hosts in the distribution=" + this.hostAndWeights.size();
@ -206,14 +198,13 @@ public class HDFSBlocksDistribution {
return uniqueBlocksTotalWeight;
}
/**
* Implementations 'visit' hostAndWeight.
*/
/** Implementations 'visit' hostAndWeight. */
public interface Visitor {
long visit(final HostAndWeight hostAndWeight);
}
/**
* Get the block locality index for a given host
* @param host the host name
* @return the locality index of the given host
*/
@ -227,6 +218,7 @@ public class HDFSBlocksDistribution {
}
/**
* Get the block locality index for a ssd for a given host
* @param host the host name
* @return the locality index with ssd of the given host
*/
@ -240,6 +232,7 @@ public class HDFSBlocksDistribution {
}
/**
* Get the blocks local weight for a given host
* @param host the host name
* @return the blocks local weight of the given host
*/
@ -248,6 +241,7 @@ public class HDFSBlocksDistribution {
}
/**
* Get the blocks local weight with ssd for a given host
* @param host the host name
* @return the blocks local with ssd weight of the given host
*/
@ -255,10 +249,6 @@ public class HDFSBlocksDistribution {
return getBlocksLocalityWeightInternal(host, HostAndWeight::getWeightForSsd);
}
/**
* @param host the host name
* @return the locality index of the given host
*/
private long getBlocksLocalityWeightInternal(String host, Visitor visitor) {
long localityIndex = 0;
HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
@ -293,9 +283,7 @@ public class HDFSBlocksDistribution {
addUniqueWeight(otherBlocksDistribution.getUniqueBlocksTotalWeight());
}
/**
* return the sorted list of hosts in terms of their weights
*/
/** Return the sorted list of hosts in terms of their weights */
public List<String> getTopHosts() {
HostAndWeight[] hostAndWeights = getTopHostsWithWeights();
List<String> topHosts = new ArrayList<>(hostAndWeights.length);
@ -305,9 +293,7 @@ public class HDFSBlocksDistribution {
return topHosts;
}
/**
* return the sorted list of hosts in terms of their weights
*/
/** Return the sorted list of hosts in terms of their weights */
public HostAndWeight[] getTopHostsWithWeights() {
NavigableSet<HostAndWeight> orderedHosts = new TreeSet<>(new HostAndWeight.WeightComparator());
orderedHosts.addAll(this.hostAndWeights.values());

View File

@ -151,7 +151,7 @@ public class IndividualBytesFieldCell implements ExtendedCell, Cloneable {
// If row is null or rLength is invalid, the constructor will reject it, by
// {@link KeyValue#checkParameters()}, so it is safe to call rLength and make the type
// conversion.
return (short) (rLength);
return (short) rLength;
}
// 2) Family
@ -170,7 +170,7 @@ public class IndividualBytesFieldCell implements ExtendedCell, Cloneable {
public byte getFamilyLength() {
// If fLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()},
// so it is safe to make the type conversion.
return (byte) (fLength);
return (byte) fLength;
}
// 3) Qualifier

View File

@ -1075,10 +1075,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
+ getValueLength() + "/seqid=" + seqId;
}
/**
* @param k Key portion of a KeyValue.
* @return Key as a String, empty string if k is null.
*/
/** Return key as a String, empty string if k is null. */
public static String keyToString(final byte[] k) {
if (k == null) {
return "";
@ -1328,10 +1325,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
return getTimestampOffset(getKeyLength());
}
/**
* @param keylength Pass if you have it to save on a int creation.
* @return Timestamp offset
*/
/** Return the timestamp offset */
private int getTimestampOffset(final int keylength) {
return getKeyOffset() + keylength - TIMESTAMP_TYPE_SIZE;
}
@ -1343,6 +1337,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
}
/**
* Update the timestamp.
* @param now Time to set into <code>this</code> IFF timestamp ==
* {@link HConstants#LATEST_TIMESTAMP} (else, its a noop).
* @return True is we modified this.
@ -1386,17 +1381,13 @@ public class KeyValue implements ExtendedCell, Cloneable {
return key;
}
/**
* n
*/
/** Return the timestamp. */
@Override
public long getTimestamp() {
return getTimestamp(getKeyLength());
}
/**
* @param keylength Pass if you have it to save on a int creation. n
*/
/** Return the timestamp. */
long getTimestamp(final int keylength) {
int tsOffset = getTimestampOffset(keylength);
return Bytes.toLong(this.bytes, tsOffset);
@ -1408,13 +1399,12 @@ public class KeyValue implements ExtendedCell, Cloneable {
return getTypeByte(getKeyLength());
}
/** Return the KeyValue.TYPE byte representation */
byte getTypeByte(int keyLength) {
return this.bytes[this.offset + keyLength - 1 + ROW_OFFSET];
}
/**
* This returns the offset where the tag actually starts.
*/
/** Return the offset where the tag data starts. */
@Override
public int getTagsOffset() {
int tagsLen = getTagsLength();
@ -1424,9 +1414,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
return this.offset + this.length - tagsLen;
}
/**
* This returns the total length of the tag bytes
*/
/** Return the total length of the tag bytes */
@Override
public int getTagsLength() {
int tagsLen = this.length - (getKeyLength() + getValueLength() + KEYVALUE_INFRASTRUCTURE_SIZE);
@ -1466,6 +1454,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
}
/**
* Find index of passed delimiter walking from start of buffer forwards.
* @param b the kv serialized byte[] to process
* @param delimiter input delimeter to fetch index from start
* @return Index of delimiter having started from start of <code>b</code> moving rightward.
@ -1749,6 +1738,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
}
/**
* Compares the rows of a cell
* @param left left cell to compare rows for
* @param right right cell to compare rows for
* @return Result comparing rows.
@ -1996,6 +1986,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
}
/**
* Compares the row of two keyvalues for equality
* @param left left cell to compare row
* @param lrowlength left row length
* @param right right cell to compare row
@ -2124,6 +2115,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
}
/**
* Create a KeyValue reading from <code>in</code>
* @param in Where to read bytes from. Creates a byte array to hold the KeyValue backing bytes
* copied from the steam.
* @return KeyValue created by deserializing from <code>in</code> OR if we find a length of zero,
@ -2250,7 +2242,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
return fixed + ClassSize.sizeOfByteArray(length);
} else {
// only count the number of bytes
return fixed + length;
return (long) fixed + length;
}
}
@ -2339,6 +2331,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
return this.bytes[getFamilyOffset() - 1];
}
@Override
int getFamilyLengthPosition(int rowLength) {
return this.offset + Bytes.SIZEOF_SHORT + rowLength;
}
@ -2378,6 +2371,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
return getTypeByte(getKeyLength());
}
@Override
byte getTypeByte(int keyLength) {
return this.bytes[this.offset + keyLength - 1];
}

View File

@ -26,7 +26,6 @@ import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@ -52,9 +51,9 @@ public class KeyValueUtil {
public static int length(short rlen, byte flen, int qlen, int vlen, int tlen, boolean withTags) {
if (withTags) {
return (int) (KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen, tlen));
return (int) KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen, tlen);
}
return (int) (KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen));
return (int) KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen);
}
/**
@ -214,7 +213,6 @@ public class KeyValueUtil {
if (bb.remaining() < 1) {
return null;
}
KeyValue keyValue = null;
int underlyingArrayOffset = bb.arrayOffset() + bb.position();
int keyLength = bb.getInt();
int valueLength = bb.getInt();
@ -226,7 +224,7 @@ public class KeyValueUtil {
ByteBufferUtils.skip(bb, tagsLength);
}
int kvLength = (int) KeyValue.getKeyValueDataStructureSize(keyLength, valueLength, tagsLength);
keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength);
KeyValue keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength);
if (includesMvccVersion) {
long mvccVersion = ByteBufferUtils.readVLong(bb);
keyValue.setSequenceId(mvccVersion);
@ -257,7 +255,7 @@ public class KeyValueUtil {
final byte[] family, final int foffset, final int flength, final byte[] qualifier,
final int qoffset, final int qlength) {
return new KeyValue(row, roffset, rlength, family, foffset, flength, qualifier, qoffset,
qlength, PrivateConstants.OLDEST_TIMESTAMP, Type.Minimum, null, 0, 0);
qlength, PrivateConstants.OLDEST_TIMESTAMP, KeyValue.Type.Minimum, null, 0, 0);
}
/**
@ -268,7 +266,7 @@ public class KeyValueUtil {
*/
public static KeyValue createFirstOnRow(final byte[] row, int roffset, short rlength) {
return new KeyValue(row, roffset, rlength, null, 0, 0, null, 0, 0, HConstants.LATEST_TIMESTAMP,
Type.Maximum, null, 0, 0);
KeyValue.Type.Maximum, null, 0, 0);
}
/**
@ -278,7 +276,7 @@ public class KeyValueUtil {
* @return Last possible KeyValue on passed <code>row</code>
*/
public static KeyValue createLastOnRow(final byte[] row) {
return new KeyValue(row, null, null, HConstants.LATEST_TIMESTAMP, Type.Minimum);
return new KeyValue(row, null, null, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Minimum);
}
/**
@ -299,7 +297,7 @@ public class KeyValueUtil {
* @return First possible key on passed <code>row</code> and timestamp.
*/
public static KeyValue createFirstOnRow(final byte[] row, final long ts) {
return new KeyValue(row, null, null, ts, Type.Maximum);
return new KeyValue(row, null, null, ts, KeyValue.Type.Maximum);
}
/**
@ -312,10 +310,12 @@ public class KeyValueUtil {
*/
public static KeyValue createFirstOnRow(final byte[] row, final byte[] family,
final byte[] qualifier) {
return new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, Type.Maximum);
return new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum);
}
/**
* Create a KeyValue for the specified row, family and qualifier that would be smaller than all
* other possible KeyValues that have the same row, family, qualifier. Used for seeking.
* @param row - row key (arbitrary byte array)
* @param f - family name
* @param q - column qualifier
@ -324,7 +324,7 @@ public class KeyValueUtil {
*/
public static KeyValue createFirstOnRow(final byte[] row, final byte[] f, final byte[] q,
final long ts) {
return new KeyValue(row, f, q, ts, Type.Maximum);
return new KeyValue(row, f, q, ts, KeyValue.Type.Maximum);
}
/**
@ -345,7 +345,7 @@ public class KeyValueUtil {
final byte[] family, final int foffset, final int flength, final byte[] qualifier,
final int qoffset, final int qlength) {
return new KeyValue(row, roffset, rlength, family, foffset, flength, qualifier, qoffset,
qlength, HConstants.LATEST_TIMESTAMP, Type.Maximum, null, 0, 0);
qlength, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum, null, 0, 0);
}
/**
@ -565,7 +565,7 @@ public class KeyValueUtil {
throw new IllegalArgumentException(msg);
}
byte type = buf[pos];
if (!Type.isValidType(type)) {
if (!KeyValue.Type.isValidType(type)) {
String msg = "Invalid type in KeyValue, type=" + type + bytesToHex(buf, offset, length);
LOG.warn(msg);
throw new IllegalArgumentException(msg);
@ -740,7 +740,6 @@ public class KeyValueUtil {
int qlen = cell.getQualifierLength();
int vlen = cell.getValueLength();
int tlen = cell.getTagsLength();
int size = 0;
// write key length
int klen = keyLength(rlen, flen, qlen);
ByteBufferUtils.putInt(out, klen);
@ -760,7 +759,7 @@ public class KeyValueUtil {
out.write(cell.getTypeByte());
// write value
out.write(cell.getValueArray(), cell.getValueOffset(), vlen);
size = klen + vlen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
int size = klen + vlen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
// write tags if we have to
if (withTags && tlen > 0) {
// 2 bytes tags length followed by tags bytes

View File

@ -132,6 +132,7 @@ public final class PrivateCellUtil {
private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE;
/**
* Construct a TagRewriteCell
* @param cell The original Cell which it rewrites
* @param tags the tags bytes. The array suppose to contain the tags bytes alone.
*/
@ -828,8 +829,8 @@ public final class PrivateCellUtil {
}
/**
* @return True if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily}
* or a {@link KeyValue.Type#DeleteColumn} KeyValue type.
* Return true if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily}
* or a {@link KeyValue.Type#DeleteColumn} KeyValue type.
*/
public static boolean isDelete(final byte type) {
return KeyValue.Type.Delete.getCode() <= type && type <= KeyValue.Type.DeleteFamily.getCode();
@ -868,9 +869,7 @@ public final class PrivateCellUtil {
return output;
}
/**
* Copies the tags info into the tag portion of the cell nnn * @return position after tags
*/
/** Copies the tags info into the tag portion of the cell */
public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) {
int tlen = cell.getTagsLength();
if (cell instanceof ByteBufferExtendedCell) {
@ -884,9 +883,7 @@ public final class PrivateCellUtil {
return destinationOffset + tlen;
}
/**
* Copies the tags info into the tag portion of the cell nnn * @return the position after tags
*/
/** Copies the tags info into the tag portion of the cell */
public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) {
int tlen = cell.getTagsLength();
if (cell instanceof ByteBufferExtendedCell) {
@ -900,6 +897,7 @@ public final class PrivateCellUtil {
}
/**
* Return tags in the given Cell as a List
* @param cell The Cell
* @return Tags in the given Cell as a List
*/
@ -945,7 +943,7 @@ public final class PrivateCellUtil {
}
/**
* Util method to iterate through the tags in the given cell.
* Utility method to iterate through the tags in the given cell.
* @param cell The Cell over which tags iterator is needed.
* @return iterator for the tags
*/
@ -1028,9 +1026,7 @@ public final class PrivateCellUtil {
&& (end1.length == 0 || start2.length == 0 || Bytes.compareTo(start2, end1) < 0);
}
/**
* Write rowkey excluding the common part. nnnnn
*/
/** Write rowkey excluding the common part. */
public static void writeRowKeyExcludingCommon(Cell cell, short rLen, int commonPrefix,
DataOutputStream out) throws IOException {
if (commonPrefix == 0) {
@ -1238,8 +1234,9 @@ public final class PrivateCellUtil {
/**
* Compares only the key portion of a cell. It does not include the sequence id/mvcc of the cell
* nn * @return an int greater than 0 if left &gt; than right lesser than 0 if left &lt; than
* right equal to 0 if left is equal to right
* nn
* @return an int greater than 0 if left &gt; than right lesser than 0 if left &lt; than right
* equal to 0 if left is equal to right
*/
public static final int compareKeyIgnoresMvcc(CellComparator comparator, Cell left, Cell right) {
return ((CellComparatorImpl) comparator).compare(left, right, true);
@ -1834,7 +1831,7 @@ public final class PrivateCellUtil {
private static class FirstOnRowColCell extends FirstOnRowCell {
// @formatter:off
private static final long FIXED_HEAPSIZE = FirstOnRowCell.FIXED_HEAPSIZE
private static final long FIXED_HEAPSIZE = (long) FirstOnRowCell.FIXED_HEAPSIZE
+ Bytes.SIZEOF_BYTE // flength
+ Bytes.SIZEOF_INT * 3 // foffset, qoffset, qlength
+ ClassSize.REFERENCE * 2; // fArray, qArray
@ -2003,7 +2000,7 @@ public final class PrivateCellUtil {
private static class LastOnRowColCell extends LastOnRowCell {
// @formatter:off
private static final long FIXED_OVERHEAD = LastOnRowCell.FIXED_OVERHEAD
private static final long FIXED_OVERHEAD = (long) LastOnRowCell.FIXED_OVERHEAD
+ ClassSize.REFERENCE * 2 // fArray and qArray
+ Bytes.SIZEOF_INT * 3 // foffset, qoffset, qlength
+ Bytes.SIZEOF_BYTE; // flength
@ -2345,7 +2342,7 @@ public final class PrivateCellUtil {
}
/**
* Sets the given timestamp to the cell. n * @param ts buffer containing the timestamp value
* Sets the given timestamp to the cell.
* @throws IOException when the passed cell is not of type {@link ExtendedCell}
*/
public static void setTimestamp(Cell cell, byte[] ts) throws IOException {
@ -2359,7 +2356,8 @@ public final class PrivateCellUtil {
/**
* Sets the given timestamp to the cell iff current timestamp is
* {@link HConstants#LATEST_TIMESTAMP}. nn * @return True if cell timestamp is modified.
* {@link HConstants#LATEST_TIMESTAMP}.
* @return True if cell timestamp is modified.
* @throws IOException when the passed cell is not of type {@link ExtendedCell}
*/
public static boolean updateLatestStamp(Cell cell, long ts) throws IOException {
@ -2372,7 +2370,7 @@ public final class PrivateCellUtil {
/**
* Sets the given timestamp to the cell iff current timestamp is
* {@link HConstants#LATEST_TIMESTAMP}. n * @param ts buffer containing the timestamp value
* {@link HConstants#LATEST_TIMESTAMP}.
* @return True if cell timestamp is modified.
* @throws IOException when the passed cell is not of type {@link ExtendedCell}
*/
@ -2711,8 +2709,8 @@ public final class PrivateCellUtil {
}
/**
* @return An new cell is located following input cell. If both of type and timestamp are minimum,
* the input cell will be returned directly.
* Return a new cell is located following input cell. If both of type and timestamp are minimum,
* the input cell will be returned directly.
*/
public static Cell createNextOnRowCol(Cell cell) {
long ts = cell.getTimestamp();

View File

@ -79,6 +79,8 @@ public abstract class ScheduledChore implements Runnable {
private final Stoppable stopper;
/**
* Construct a ScheduledChore
* <p>
* This constructor is for test only. It allows us to create an object and to call chore() on it.
*/
@InterfaceAudience.Private
@ -87,6 +89,7 @@ public abstract class ScheduledChore implements Runnable {
}
/**
* Construct a ScheduledChore
* @param name Name assigned to Chore. Useful for identification amongst chores of the same
* type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and cleanup
@ -97,6 +100,7 @@ public abstract class ScheduledChore implements Runnable {
}
/**
* Construct a ScheduledChore
* @param name Name assigned to Chore. Useful for identification amongst chores of the
* same type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and
@ -112,6 +116,7 @@ public abstract class ScheduledChore implements Runnable {
}
/**
* Construct a ScheduledChore
* @param name Name assigned to Chore. Useful for identification amongst chores of the
* same type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and
@ -132,9 +137,6 @@ public abstract class ScheduledChore implements Runnable {
this.timeUnit = unit;
}
/**
* @see java.lang.Runnable#run()
*/
@Override
public void run() {
updateTimeTrackingBeforeRun();
@ -193,8 +195,8 @@ public abstract class ScheduledChore implements Runnable {
}
/**
* @return How long in millis has it been since this chore last run. Useful for checking if the
* chore has missed its scheduled start time by too large of a margin
* Return how long in millis has it been since this chore last run. Useful for checking if the
* chore has missed its scheduled start time by too large of a margin
*/
synchronized long getTimeBetweenRuns() {
return timeOfThisRun - timeOfLastRun;
@ -212,10 +214,7 @@ public abstract class ScheduledChore implements Runnable {
return 1.5 * timeUnit.toMillis(period);
}
/**
* @param time in system millis
* @return true if time is earlier or equal to current milli time
*/
/** Return true if time is earlier or equal to current time */
private synchronized boolean isValidTime(final long time) {
return time > 0 && time <= EnvironmentEdgeManager.currentTime();
}
@ -297,9 +296,7 @@ public abstract class ScheduledChore implements Runnable {
chore();
}
/**
* The task to execute on each scheduled execution of the Chore
*/
/** The task to execute on each scheduled execution of the Chore */
protected abstract void chore();
/**

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.hbase;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Interner;
import org.apache.hbase.thirdparty.com.google.common.collect.Interners;
import org.apache.hbase.thirdparty.com.google.common.net.InetAddresses;
@ -80,8 +82,8 @@ public class ServerName implements Comparable<ServerName>, Serializable {
*/
public static final String UNKNOWN_SERVERNAME = "#unknown#";
private final String servername;
private final long startcode;
private final String serverName;
private final long startCode;
private transient Address address;
/**
@ -97,15 +99,15 @@ public class ServerName implements Comparable<ServerName>, Serializable {
*/
private static final Interner<ServerName> INTERN_POOL = Interners.newWeakInterner();
protected ServerName(final String hostname, final int port, final long startcode) {
this(Address.fromParts(hostname, port), startcode);
protected ServerName(final String hostname, final int port, final long startCode) {
this(Address.fromParts(hostname, port), startCode);
}
private ServerName(final Address address, final long startcode) {
private ServerName(final Address address, final long startCode) {
// Use HostAndPort to host port and hostname. Does validation and can do ipv6
this.address = address;
this.startcode = startcode;
this.servername = getServerName(this.address.getHostname(), this.address.getPort(), startcode);
this.startCode = startCode;
this.serverName = getServerName(this.address.getHostname(), this.address.getPort(), startCode);
}
private ServerName(final String hostAndPort, final long startCode) {
@ -120,20 +122,20 @@ public class ServerName implements Comparable<ServerName>, Serializable {
if (InetAddresses.isInetAddress(hostname)) {
return hostname;
}
String[] parts = hostname.split("\\.");
if (parts.length == 0) {
List<String> parts = Splitter.on('.').splitToList(hostname);
if (parts.size() == 0) {
return hostname;
}
return parts[0];
Iterator<String> i = parts.iterator();
return i.next();
}
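Switching from String.split to Guava's Splitter addresses an error-prone finding about split's regex semantics: split("\\.") treats its argument as a regular expression and recompiles it on every call, while Splitter.on('.') splits on a literal character. A small sketch with a hypothetical hostname, using the relocated org.apache.hbase.thirdparty Splitter imported above:

List<String> parts = Splitter.on('.').splitToList("rs1.example.org");
String shortName = parts.iterator().next();   // "rs1", the host minus its domain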
/**
* Retrieve an instance of ServerName. Callers should use the equals method to compare returned
* instances, though we may return a shared immutable object as an internal optimization.
*/
public static ServerName valueOf(final String hostname, final int port, final long startcode) {
return INTERN_POOL.intern(new ServerName(hostname, port, startcode));
public static ServerName valueOf(final String hostname, final int port, final long startCode) {
return INTERN_POOL.intern(new ServerName(hostname, port, startCode));
}
/**
@ -141,11 +143,12 @@ public class ServerName implements Comparable<ServerName>, Serializable {
* instances, though we may return a shared immutable object as an internal optimization.
*/
public static ServerName valueOf(final String serverName) {
final String hostname = serverName.substring(0, serverName.indexOf(SERVERNAME_SEPARATOR));
final int port = Integer.parseInt(serverName.split(SERVERNAME_SEPARATOR)[1]);
final long statuscode =
Long.parseLong(serverName.substring(serverName.lastIndexOf(SERVERNAME_SEPARATOR) + 1));
return INTERN_POOL.intern(new ServerName(hostname, port, statuscode));
int firstSep = serverName.indexOf(SERVERNAME_SEPARATOR);
int lastSep = serverName.lastIndexOf(SERVERNAME_SEPARATOR);
String hostname = serverName.substring(0, firstSep);
int port = Integer.parseInt(serverName.substring(firstSep + 1, lastSep));
long startCode = Long.parseLong(serverName.substring(lastSep + 1));
return valueOf(hostname, port, startCode);
}
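The rewritten valueOf(String) parses the three comma-separated components with plain index arithmetic instead of mixing substring with a regex-based split. A walk-through on a hypothetical server name (values invented for illustration):

String name = "example.org,16020,1660000000000";
int firstSep = name.indexOf(',');                                    // 11
int lastSep = name.lastIndexOf(',');                                 // 17
String hostname = name.substring(0, firstSep);                       // "example.org"
int port = Integer.parseInt(name.substring(firstSep + 1, lastSep));  // 16020
long startCode = Long.parseLong(name.substring(lastSep + 1));        // 1660000000000

Delegating to valueOf(hostname, port, startCode) at the end also keeps the interning logic in one place.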
/**
@ -161,12 +164,12 @@ public class ServerName implements Comparable<ServerName>, Serializable {
* method to compare returned instances, though we may return a shared immutable object as an
* internal optimization.
* @param address the {@link Address} to use for getting the {@link ServerName}
* @param startcode the startcode to use for getting the {@link ServerName}
* @param startCode the startcode to use for getting the {@link ServerName}
* @return the constructed {@link ServerName}
* @see #valueOf(String, int, long)
*/
public static ServerName valueOf(final Address address, final long startcode) {
return valueOf(address.getHostname(), address.getPort(), startcode);
public static ServerName valueOf(final Address address, final long startCode) {
return valueOf(address.getHostname(), address.getPort(), startCode);
}
@Override
@ -175,10 +178,9 @@ public class ServerName implements Comparable<ServerName>, Serializable {
}
/**
* @return Return a SHORT version of {@link #toString()}, one that has the host only, minus the
* domain, and the port only -- no start code; the String is for us internally mostly
* tying threads to their server. Not for external use. It is lossy and will not work in
* in compares, etc.
* Return a SHORT version of {@link #toString()}, one that has the host only, minus the domain,
* and the port only -- no start code; the String is for us internally mostly tying threads to
* their server. Not for external use. It is lossy and will not work in in compares, etc.
*/
public String toShortString() {
return Addressing.createHostAndPortStr(getHostNameMinusDomain(this.address.getHostname()),
@ -186,8 +188,8 @@ public class ServerName implements Comparable<ServerName>, Serializable {
}
/**
* @return {@link #getServerName()} as bytes with a short-sized prefix with the {@link #VERSION}
* of this class.
* Return {@link #getServerName()} as bytes with a short-sized prefix with the {@link #VERSION} of
* this class.
*/
public synchronized byte[] getVersionedBytes() {
if (this.bytes == null) {
@ -197,7 +199,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
}
public String getServerName() {
return servername;
return serverName;
}
public String getHostname() {
@ -212,21 +214,31 @@ public class ServerName implements Comparable<ServerName>, Serializable {
return this.address.getPort();
}
/**
* Return the start code.
* @deprecated Since 2.5.0, will be removed in 4.0.0. Use {@link #getStartCode()} instead.
*/
@Deprecated
public long getStartcode() {
return startcode;
return startCode;
}
/** Return the start code. */
public long getStartCode() {
return startCode;
}
/**
* For internal use only.
* @param hostName the name of the host to use
* @param port the port on the host to use
* @param startcode the startcode to use for formatting
* @param startCode the startcode to use for formatting
* @return Server name made of the concatenation of hostname, port and startcode formatted as
* <code>&lt;hostname&gt; ',' &lt;port&gt; ',' &lt;startcode&gt;</code>
*/
private static String getServerName(String hostName, int port, long startcode) {
private static String getServerName(String hostName, int port, long startCode) {
return hostName.toLowerCase(Locale.ROOT) + SERVERNAME_SEPARATOR + port + SERVERNAME_SEPARATOR
+ startcode;
+ startCode;
}
public Address getAddress() {
@ -256,7 +268,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
if (compare != 0) {
return compare;
}
return Long.compare(this.getStartcode(), other.getStartcode());
return Long.compare(this.getStartCode(), other.getStartCode());
}
@Override
@ -279,6 +291,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
}
/**
* Compare two addresses
* @param left the first server address to compare
* @param right the second server address to compare
* @return {@code true} if {@code left} and {@code right} have the same hostname and port.
@ -308,6 +321,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
}
/**
* Parse a ServerName from a string
* @param str Either an instance of {@link #toString()} or a "'&lt;hostname&gt;' ':'
* '&lt;port&gt;'".
* @return A ServerName instance.

View File

@ -274,9 +274,6 @@ public final class TableName implements Comparable<TableName> {
return nameAsString;
}
/**
* @throws IllegalArgumentException See {@link #valueOf(byte[])}
*/
private TableName(ByteBuffer namespace, ByteBuffer qualifier) throws IllegalArgumentException {
this.qualifier = new byte[qualifier.remaining()];
qualifier.duplicate().get(this.qualifier);
@ -320,9 +317,7 @@ public final class TableName implements Comparable<TableName> {
isLegalTableQualifierName(this.qualifier);
}
/**
* This is only for the old and meta tables.
*/
/** This is only for the old and meta tables. */
private TableName(String qualifier) {
this.qualifier = Bytes.toBytes(qualifier);
this.qualifierAsString = qualifier;
@ -394,6 +389,7 @@ public final class TableName implements Comparable<TableName> {
}
/**
* Construct a TableName
* @param fullName will use the entire byte array
* @throws IllegalArgumentException if fullName equals old root or old meta. Some code depends on
* this. The test is buried in the table creation to save on
@ -405,6 +401,7 @@ public final class TableName implements Comparable<TableName> {
}
/**
* Construct a TableName
* @param fullName byte array to look into
* @param offset within said array
* @param length within said array
@ -437,6 +434,7 @@ public final class TableName implements Comparable<TableName> {
}
/**
* Construct a TableName
* @param fullname of a table, possibly with a leading namespace and ':' as delimiter.
* @throws IllegalArgumentException if fullName equals old root or old meta.
*/
@ -462,6 +460,7 @@ public final class TableName implements Comparable<TableName> {
}
/**
* Construct a TableName
* @throws IllegalArgumentException if fullName equals old root or old meta. Some code depends on
* this.
*/
@ -526,12 +525,12 @@ public final class TableName implements Comparable<TableName> {
return hashCode;
}
/**
* For performance reasons, the ordering is not lexicographic.
*/
@Override
public int compareTo(TableName tableName) {
if (this == tableName) return 0;
// For performance reasons, the ordering is not lexicographic.
if (this == tableName) {
return 0;
}
if (this.hashCode < tableName.hashCode()) {
return -1;
}

View File

@ -55,15 +55,13 @@ public interface Tag {
/** Returns Length of tag value within the backed buffer */
int getValueLength();
/**
* Tells whether or not this Tag is backed by a byte array.
* @return true when this Tag is backed by byte array
*/
/** Return true if the tag is backed by a byte array */
boolean hasArray();
/**
* @return The array containing the value bytes. n * when {@link #hasArray()} return false. Use
* {@link #getValueByteBuffer()} in such situation
* Return an array containing the value bytes if {@link #hasArray()} returns true.
* <p>
* Use {@link #getValueByteBuffer()} otherwise.
*/
byte[] getValueArray();

View File

@ -70,9 +70,7 @@ public final class TagUtil {
return carryForwardTags(null, cell);
}
/**
* Add to <code>tagsOrNull</code> any Tags <code>cell</code> is carrying or null if none.
*/
/** Add to <code>tagsOrNull</code> any Tags <code>cell</code> is carrying or null if none. */
public static List<Tag> carryForwardTags(final List<Tag> tagsOrNull, final Cell cell) {
Iterator<Tag> itr = PrivateCellUtil.tagsIterator(cell);
if (itr == EMPTY_TAGS_ITR) {
@ -166,9 +164,7 @@ public final class TagUtil {
return b;
}
/**
* Iterator returned when no Tags. Used by CellUtil too.
*/
/** Iterator returned when no Tags. Used by CellUtil too. */
static final Iterator<Tag> EMPTY_TAGS_ITR = new Iterator<Tag>() {
@Override
public boolean hasNext() {

View File

@ -50,10 +50,12 @@ public abstract class ByteArrayComparable implements Comparable<byte[]> {
public abstract byte[] toByteArray();
/**
* Parse a serialized representation of {@link ByteArrayComparable}
* @param pbBytes A pb serialized {@link ByteArrayComparable} instance
* @return An instance of {@link ByteArrayComparable} made from <code>bytes</code> n * @see
* #toByteArray
* @return An instance of {@link ByteArrayComparable} made from <code>bytes</code>
* @see #toByteArray
*/
@SuppressWarnings("DoNotCallSuggester")
public static ByteArrayComparable parseFrom(final byte[] pbBytes)
throws DeserializationException {
throw new DeserializationException(
@ -61,12 +63,13 @@ public abstract class ByteArrayComparable implements Comparable<byte[]> {
}
/**
* n * @return true if and only if the fields of the comparator that are serialized are equal to
* the corresponding fields in other. Used for testing.
* Return true if and only if the fields of the comparator that are serialized are equal to the
* corresponding fields in other.
*/
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (other == this) {
return true;
}
return Bytes.equals(this.getValue(), other.getValue());
}

View File

@ -41,8 +41,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public interface HeapSize {
/**
* @return Approximate 'exclusive deep size' of implementing object. Includes count of payload and
* hosting object sizings.
* Return the approximate 'exclusive deep size' of implementing object. Includes count of payload
* and hosting object sizings.
*/
long heapSize();
}
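As a rough illustration of the contract, a hypothetical implementor reports its own shallow size plus the payload it hosts; the class and field names below are invented and not part of the patch:

class CountedBuffer implements HeapSize {
  private final byte[] payload = new byte[64];

  @Override
  public long heapSize() {
    // shallow object size (header plus one reference) plus the hosted byte[]
    return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE)
      + ClassSize.sizeOfByteArray(payload.length);
  }
}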

View File

@ -91,16 +91,12 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
return this.bytes;
}
/**
* @param b Use passed bytes as backing array for this instance.
*/
/** Use passed bytes as backing array for this instance. */
public void set(final byte[] b) {
set(b, 0, b.length);
}
/**
* @param b Use passed bytes as backing array for this instance. nn
*/
/** Use passed bytes as backing array for this instance. */
public void set(final byte[] b, final int offset, final int length) {
this.bytes = b;
this.offset = offset;
@ -116,9 +112,7 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
return this.length;
}
/**
* n
*/
/** Return the offset into the buffer. */
public int getOffset() {
return this.offset;
}
@ -138,6 +132,7 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
}
// Below methods copied from BytesWritable
@Override
public int hashCode() {
int hash = 1;
@ -167,9 +162,6 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
that.length);
}
/**
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object right_obj) {
if (right_obj instanceof byte[]) {
@ -181,9 +173,6 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
return false;
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder(3 * this.length);
@ -200,21 +189,15 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
return sb.length() > 0 ? sb.substring(1) : "";
}
/**
* A Comparator optimized for ImmutableBytesWritable.
*/
/** A Comparator optimized for ImmutableBytesWritable. */
@InterfaceAudience.Public
public static class Comparator extends WritableComparator {
private BytesWritable.Comparator comparator = new BytesWritable.Comparator();
/** constructor */
public Comparator() {
super(ImmutableBytesWritable.class);
}
/**
* @see org.apache.hadoop.io.WritableComparator#compare(byte[], int, int, byte[], int, int)
*/
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return comparator.compare(b1, s1, l1, b2, s2, l2);
@ -226,6 +209,7 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
}
/**
* Convert a list of byte arrays into an array of byte arrays
* @param array List of byte [].
* @return Array of byte [].
*/
@ -238,9 +222,7 @@ public class ImmutableBytesWritable implements WritableComparable<ImmutableBytes
return results;
}
/**
* Returns a copy of the bytes referred to by this writable
*/
/** Returns a copy of the bytes referred to by this writable */
public byte[] copyBytes() {
return Arrays.copyOfRange(bytes, offset, offset + length);
}

View File

@ -81,7 +81,8 @@ public class TagCompressionContext {
public void compressTags(OutputStream out, ByteBuffer in, int offset, int length)
throws IOException {
if (in.hasArray()) {
compressTags(out, in.array(), offset, length);
// Offset we are given is relative to ByteBuffer#arrayOffset
compressTags(out, in.array(), in.arrayOffset() + offset, length);
} else {
int pos = offset;
int endOffset = pos + length;
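The fix accounts for ByteBuffer#arrayOffset: when a heap buffer exposes only a window of a larger backing array, a buffer-relative offset must be shifted by arrayOffset() before it can index array(). A hypothetical illustration, not taken from the patch:

byte[] backing = new byte[32];
ByteBuffer whole = ByteBuffer.wrap(backing);
whole.position(8);
ByteBuffer in = whole.slice();            // shares backing[], in.arrayOffset() == 8
int offset = 0;                           // buffer-relative offset passed by the caller
int absolute = in.arrayOffset() + offset; // 8, the correct index into backing[]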

View File

@ -100,7 +100,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
def.finish();
while (!def.finished()) {
int i = def.deflate(this.buf, 0, this.buf.length);
if ((def.finished()) && (i <= this.buf.length - TRAILER_SIZE)) {
if (def.finished() && (i <= this.buf.length - TRAILER_SIZE)) {
writeTrailer(this.buf, i);
i += TRAILER_SIZE;
out.write(this.buf, 0, i);

View File

@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.io.TagCompressionContext;
@ -279,14 +278,14 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
protected static class OnheapDecodedCell implements ExtendedCell {
private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+ (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)
+ (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));
+ Bytes.SIZEOF_SHORT + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY));
private byte[] keyOnlyBuffer;
private short rowLength;
private int familyOffset;
private byte familyLength;
private int qualifierOffset;
private int qualifierLength;
private long timestamp;
private long timeStamp;
private byte typeByte;
private byte[] valueBuffer;
private int valueOffset;
@ -306,7 +305,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
this.familyLength = familyLength;
this.qualifierOffset = qualOffset;
this.qualifierLength = qualLength;
this.timestamp = timeStamp;
this.timeStamp = timeStamp;
this.typeByte = typeByte;
this.valueBuffer = valueBuffer;
this.valueOffset = valueOffset;
@ -364,7 +363,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
@Override
public long getTimestamp() {
return timestamp;
return timeStamp;
}
@Override
@ -475,16 +474,17 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
}
protected static class OffheapDecodedExtendedCell extends ByteBufferExtendedCell {
private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+ (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)
+ (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER));
private static final long FIXED_OVERHEAD =
(long) ClassSize.align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
+ (7 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE)
+ (3 * ClassSize.BYTE_BUFFER);
private ByteBuffer keyBuffer;
private short rowLength;
private int familyOffset;
private byte familyLength;
private int qualifierOffset;
private int qualifierLength;
private long timestamp;
private long timeStamp;
private byte typeByte;
private ByteBuffer valueBuffer;
private int valueOffset;
@ -507,7 +507,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
this.familyLength = familyLength;
this.qualifierOffset = qualOffset;
this.qualifierLength = qualLength;
this.timestamp = timeStamp;
this.timeStamp = timeStamp;
this.typeByte = typeByte;
this.valueBuffer = valueBuffer;
this.valueOffset = valueOffset;
@ -519,6 +519,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
}
@Override
@SuppressWarnings("ByteBufferBackingArray")
public byte[] getRowArray() {
return this.keyBuffer.array();
}
@ -534,6 +535,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
}
@Override
@SuppressWarnings("ByteBufferBackingArray")
public byte[] getFamilyArray() {
return this.keyBuffer.array();
}
@ -549,6 +551,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
}
@Override
@SuppressWarnings("ByteBufferBackingArray")
public byte[] getQualifierArray() {
return this.keyBuffer.array();
}
@ -565,7 +568,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
@Override
public long getTimestamp() {
return this.timestamp;
return this.timeStamp;
}
@Override
@ -671,10 +674,10 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
@Override
public int write(OutputStream out, boolean withTags) throws IOException {
int lenToWrite = getSerializedSize(withTags);
ByteBufferUtils.putInt(out, keyBuffer.capacity());
ByteBufferUtils.putInt(out, keyBuffer.remaining());
ByteBufferUtils.putInt(out, valueLength);
// Write key
out.write(keyBuffer.array());
out.write(keyBuffer.array(), keyBuffer.arrayOffset(), keyBuffer.remaining());
// Write value
ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength);
if (withTags && this.tagsLength > 0) {
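Both changes in write() guard against key buffers that are views over a larger array: remaining() reflects only the readable key bytes, and array() alone returns the whole backing array, so the offset and length for the write must come from arrayOffset() and remaining(). A hypothetical illustration with invented sizes:

byte[] backing = new byte[100];
ByteBuffer keyBuffer = ByteBuffer.wrap(backing, 10, 20).slice();
// keyBuffer.remaining() == 20 and keyBuffer.arrayOffset() == 10, but
// keyBuffer.array().length == 100, so out.write(keyBuffer.array()) would
// emit all 100 bytes instead of the 20 that form the key.
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(keyBuffer.array(), keyBuffer.arrayOffset(), keyBuffer.remaining());  // 20 key bytes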
@ -928,14 +931,14 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
private int compareTypeBytes(Cell key, Cell right) {
if (
key.getFamilyLength() + key.getQualifierLength() == 0
&& key.getTypeByte() == Type.Minimum.getCode()
&& key.getTypeByte() == KeyValue.Type.Minimum.getCode()
) {
// left is "bigger", i.e. it appears later in the sorted order
return 1;
}
if (
right.getFamilyLength() + right.getQualifierLength() == 0
&& right.getTypeByte() == Type.Minimum.getCode()
&& right.getTypeByte() == KeyValue.Type.Minimum.getCode()
) {
return -1;
}

View File

@ -28,6 +28,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* assign it a new id. Announce the new id in the HBase mailing list to prevent collisions.
*/
@InterfaceAudience.Public
@SuppressWarnings("ImmutableEnumChecker")
public enum DataBlockEncoding {
/** Disable data block encoding. */
@ -100,6 +101,8 @@ public enum DataBlockEncoding {
* @param dest output array
* @param offset starting offset of the output array n
*/
// System.arraycopy is static native. Nothing we can do this until we have minimum JDK 9.
@SuppressWarnings("UnsafeFinalization")
public void writeIdInBytes(byte[] dest, int offset) throws IOException {
System.arraycopy(idInBytes, 0, dest, offset, ID_SIZE);
}
@ -159,10 +162,10 @@ public enum DataBlockEncoding {
return algorithm;
}
protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName) {
static DataBlockEncoder createEncoder(String fullyQualifiedClassName) {
try {
return (DataBlockEncoder) Class.forName(fullyQualifiedClassName).getDeclaredConstructor()
.newInstance();
return Class.forName(fullyQualifiedClassName).asSubclass(DataBlockEncoder.class)
.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
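Using Class.asSubclass moves the type check up front: if the configured class does not implement DataBlockEncoder, the call fails with a ClassCastException before the class is instantiated, rather than via an unchecked cast after newInstance(). A hedged sketch with an invented class name; reflective exceptions are left to the caller, as in createEncoder:

String fqcn = "org.example.MyEncoder";    // hypothetical encoder class name
DataBlockEncoder encoder = Class.forName(fqcn)
  .asSubclass(DataBlockEncoder.class)     // fails here if it is not a DataBlockEncoder
  .getDeclaredConstructor()
  .newInstance();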

View File

@ -304,7 +304,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
ByteBuffer result = ByteBuffer.allocate(keyLength);
// copy row
assert !(result.isDirect());
assert !result.isDirect();
int pos = result.arrayOffset();
block.get(result.array(), pos, Bytes.SIZEOF_SHORT);
pos += Bytes.SIZEOF_SHORT;

View File

@ -143,8 +143,9 @@ public class EncodedDataBlock {
ByteBufferUtils.skip(decompressedData, tagsLen);
}
}
KeyValue kv = new KeyValue(decompressedData.array(), offset,
(int) KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen));
KeyValue kv =
new KeyValue(decompressedData.array(), decompressedData.arrayOffset() + offset,
(int) KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen));
if (meta.isIncludesMvcc()) {
long mvccVersion = ByteBufferUtils.readVLong(decompressedData);
kv.setSequenceId(mvccVersion);
@ -271,7 +272,7 @@ public class EncodedDataBlock {
if (this.meta.isIncludesMvcc()) {
memstoreTS = ByteBufferUtils.readVLong(in);
}
kv = new KeyValue(in.array(), kvOffset,
kv = new KeyValue(in.array(), in.arrayOffset() + kvOffset,
(int) KeyValue.getKeyValueDataStructureSize(klength, vlength, tagsLength));
kv.setSequenceId(memstoreTS);
this.dataBlockEncoder.encode(kv, encodingCtx, out);
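A hedged illustration (plain JDK, not HBase code) of the bug class the two hunks above fix: when a ByteBuffer is a slice over a shared backing array, indexing buf.array() without adding buf.arrayOffset() reads the wrong bytes.

import java.nio.ByteBuffer;

public class ArrayOffsetDemo {
  public static void main(String[] args) {
    byte[] backing = new byte[16];
    for (int i = 0; i < backing.length; i++) {
      backing[i] = (byte) i;
    }
    // A slice shares the backing array but starts at a non-zero arrayOffset().
    ByteBuffer slice = ByteBuffer.wrap(backing, 4, 8).slice();
    int offset = 2; // logical offset within the slice
    // Wrong: indexes the backing array as if the slice started at 0.
    byte wrong = slice.array()[offset];
    // Right: account for where the slice actually begins in the backing array.
    byte right = slice.array()[slice.arrayOffset() + offset];
    System.out.println(wrong + " vs " + right); // prints "2 vs 6"
  }
}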

View File

@ -43,23 +43,20 @@ public interface HFileBlockEncodingContext {
*/
void postEncoding(BlockType blockType) throws IOException;
/**
* Releases the resources used.
*/
/** Releases the resources used. */
void close();
/** Returns HFile context information */
HFileContext getHFileContext();
/**
* Sets the encoding state.
*/
/** Sets the encoding state. */
void setEncodingState(EncodingState state);
/** Returns the encoding state */
EncodingState getEncodingState();
/**
* Compress and encrypt the supplied encoded block data with header.
* @param data encoded bytes with header
* @param offset the offset in encoded data to start at
* @param length the number of encoded bytes

View File

@ -80,12 +80,13 @@ public class RowIndexSeekerV1 extends AbstractEncodedSeeker {
}
@Override
@SuppressWarnings("ByteBufferBackingArray")
public Cell getKey() {
if (current.keyBuffer.hasArray()) {
return new KeyValue.KeyOnlyKeyValue(current.keyBuffer.array(),
current.keyBuffer.arrayOffset() + current.keyBuffer.position(), current.keyLength);
} else {
byte[] key = new byte[current.keyLength];
final byte[] key = new byte[current.keyLength];
ByteBufferUtils.copyFromBufferToArray(key, current.keyBuffer, current.keyBuffer.position(), 0,
current.keyLength);
return new KeyValue.KeyOnlyKeyValue(key, 0, current.keyLength);

View File

@ -31,6 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* The values in the enum appear in the order they appear in a version 2 HFile.
*/
@InterfaceAudience.Private
@SuppressWarnings("ImmutableEnumChecker")
public enum BlockType {
// Scanned block section
@ -179,6 +180,8 @@ public enum BlockType {
* @param offset position in the array
* @return incremented offset
*/
// System.arraycopy is static native. We can't do anything about this until minimum JDK is 9.
@SuppressWarnings("UnsafeFinalization")
public int put(byte[] bytes, int offset) {
System.arraycopy(magic, 0, bytes, offset, MAGIC_LENGTH);
return offset + MAGIC_LENGTH;

View File

@ -56,7 +56,7 @@ public class HFileContext implements HeapSize, Cloneable {
/** the number of bytes per checksum value **/
private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
/** Number of uncompressed bytes we allow per block. */
private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
private int blockSize = HConstants.DEFAULT_BLOCKSIZE;
private DataBlockEncoding encoding = DataBlockEncoding.NONE;
/** Encryption algorithm and key used */
private Encryption.Context cryptoContext = Encryption.Context.NONE;
@ -81,7 +81,7 @@ public class HFileContext implements HeapSize, Cloneable {
this.compressTags = context.compressTags;
this.checksumType = context.checksumType;
this.bytesPerChecksum = context.bytesPerChecksum;
this.blocksize = context.blocksize;
this.blockSize = context.blockSize;
this.encoding = context.encoding;
this.cryptoContext = context.cryptoContext;
this.fileCreateTime = context.fileCreateTime;
@ -103,7 +103,7 @@ public class HFileContext implements HeapSize, Cloneable {
this.compressTags = compressTags;
this.checksumType = checksumType;
this.bytesPerChecksum = bytesPerChecksum;
this.blocksize = blockSize;
this.blockSize = blockSize;
if (encoding != null) {
this.encoding = encoding;
}
@ -175,7 +175,7 @@ public class HFileContext implements HeapSize, Cloneable {
}
public int getBlocksize() {
return blocksize;
return blockSize;
}
public long getFileCreateTime() {
@ -233,7 +233,7 @@ public class HFileContext implements HeapSize, Cloneable {
@Override
public HFileContext clone() {
try {
return (HFileContext) (super.clone());
return (HFileContext) super.clone();
} catch (CloneNotSupportedException e) {
throw new AssertionError(); // Won't happen
}
@ -250,7 +250,7 @@ public class HFileContext implements HeapSize, Cloneable {
sb.append(", bytesPerChecksum=");
sb.append(bytesPerChecksum);
sb.append(", blocksize=");
sb.append(blocksize);
sb.append(blockSize);
sb.append(", encoding=");
sb.append(encoding);
sb.append(", includesMvcc=");

View File

@ -44,11 +44,11 @@ public class HFileContextBuilder {
/** Whether tags to be compressed or not **/
private boolean compressTags = false;
/** the checksum type **/
private ChecksumType checksumType = ChecksumType.getDefaultChecksumType();
private ChecksumType checkSumType = ChecksumType.getDefaultChecksumType();
/** the number of bytes per checksum value **/
private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
/** Number of uncompressed bytes we allow per block. */
private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
private int blockSize = HConstants.DEFAULT_BLOCKSIZE;
private DataBlockEncoding encoding = DataBlockEncoding.NONE;
/** Crypto context */
private Encryption.Context cryptoContext = Encryption.Context.NONE;
@ -71,9 +71,9 @@ public class HFileContextBuilder {
this.includesTags = hfc.isIncludesTags();
this.compression = hfc.getCompression();
this.compressTags = hfc.isCompressTags();
this.checksumType = hfc.getChecksumType();
this.checkSumType = hfc.getChecksumType();
this.bytesPerChecksum = hfc.getBytesPerChecksum();
this.blocksize = hfc.getBlocksize();
this.blockSize = hfc.getBlocksize();
this.encoding = hfc.getDataBlockEncoding();
this.cryptoContext = hfc.getEncryptionContext();
this.fileCreateTime = hfc.getFileCreateTime();
@ -109,7 +109,7 @@ public class HFileContextBuilder {
}
public HFileContextBuilder withChecksumType(ChecksumType checkSumType) {
this.checksumType = checkSumType;
this.checkSumType = checkSumType;
return this;
}
@ -119,7 +119,7 @@ public class HFileContextBuilder {
}
public HFileContextBuilder withBlockSize(int blockSize) {
this.blocksize = blockSize;
this.blockSize = blockSize;
return this;
}
@ -160,7 +160,7 @@ public class HFileContextBuilder {
public HFileContext build() {
return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression,
compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext,
compressTags, checkSumType, bytesPerChecksum, blockSize, encoding, cryptoContext,
fileCreateTime, hfileName, columnFamily, tableName, cellComparator);
}
}

View File

@ -18,9 +18,12 @@
package org.apache.hadoop.hbase.net;
import java.net.InetSocketAddress;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;
/**
@ -95,11 +98,14 @@ public class Address implements Comparable<Address> {
*/
public String toStringWithoutDomain() {
String hostname = getHostName();
String[] parts = hostname.split("\\.");
if (parts.length > 1) {
for (String part : parts) {
List<String> parts = Splitter.on('.').splitToList(hostname);
if (parts.size() > 1) {
Iterator<String> i = parts.iterator();
String base = i.next();
while (i.hasNext()) {
String part = i.next();
if (!StringUtils.isNumeric(part)) {
return Address.fromParts(parts[0], getPort()).toString();
return Address.fromParts(base, getPort()).toString();
}
}
}
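A small stand-alone sketch of the domain-stripping logic in the hunk above. It uses plain Guava's Splitter and commons-lang3 StringUtils where HBase uses the relocated org.apache.hbase.thirdparty copy; the class name and sample hostnames are invented.

import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Splitter;

public class HostShortener {
  // Returns the first label of a fully qualified hostname, e.g. "host1.example.org" -> "host1",
  // but leaves pure IPv4 literals such as "10.0.0.1" untouched.
  static String withoutDomain(String hostname) {
    List<String> parts = Splitter.on('.').splitToList(hostname);
    if (parts.size() > 1) {
      Iterator<String> i = parts.iterator();
      String base = i.next();
      while (i.hasNext()) {
        if (!StringUtils.isNumeric(i.next())) {
          return base;
        }
      }
    }
    return hostname;
  }

  public static void main(String[] args) {
    System.out.println(withoutDomain("host1.example.org")); // host1
    System.out.println(withoutDomain("10.0.0.1"));          // 10.0.0.1
  }
}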

View File

@ -69,6 +69,7 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
ObjectUtil.checkPositive(refCnt(), REFERENCE_COUNT_NAME);
}
@Override
public int refCnt() {
return refCnt.refCnt();
}
@ -103,27 +104,16 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/** Returns the total capacity of this ByteBuff. */
public abstract int capacity();
/**
* Returns the limit of this ByteBuff
* @return limit of the ByteBuff
*/
/** Returns the limit of this ByteBuff */
public abstract int limit();
/**
* Marks the limit of this ByteBuff. n * @return This ByteBuff
*/
/** Marks the limit of this ByteBuff */
public abstract ByteBuff limit(int limit);
/**
* Rewinds this ByteBuff and the position is set to 0
* @return this object
*/
/** Rewinds this ByteBuff and the position is set to 0 */
public abstract ByteBuff rewind();
/**
* Marks the current position of the ByteBuff
* @return this object
*/
/** Marks the current position of the ByteBuff */
public abstract ByteBuff mark();
/**
@ -156,16 +146,10 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
*/
public abstract void asSubByteBuffer(int offset, int length, ObjectIntPair<ByteBuffer> pair);
/**
* Returns the number of elements between the current position and the limit.
* @return the remaining elements in this ByteBuff
*/
/** Returns the number of elements between the current position and the limit. */
public abstract int remaining();
/**
* Returns true if there are elements between the current position and the limt
* @return true if there are elements, false otherwise
*/
/** Returns true if there are elements between the current position and the limit. */
public abstract boolean hasRemaining();
/**
@ -201,31 +185,34 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/**
* Fetches the byte at the given index. Does not change position of the underlying ByteBuffers n
* * @return the byte at the given index
* @return the byte at the given index
*/
public abstract byte get(int index);
/**
* Fetches the byte at the given offset from current position. Does not change position of the
* underlying ByteBuffers. n * @return the byte value at the given index.
* underlying ByteBuffers.
* @return the byte value at the given index.
*/
public abstract byte getByteAfterPosition(int offset);
/**
* Writes a byte to this ByteBuff at the current position and increments the position n * @return
* this object
* Writes a byte to this ByteBuff at the current position and increments the position
* @return this object
*/
public abstract ByteBuff put(byte b);
/**
* Writes a byte to this ByteBuff at the given index nn * @return this object
* Writes a byte to this ByteBuff at the given index
* @return this object
*/
public abstract ByteBuff put(int index, byte b);
/**
* Copies the specified number of bytes from this ByteBuff's current position to the byte[]'s
* offset. Also advances the position of the ByteBuff by the given length. n * @param offset
* within the current array
* offset. Also advances the position of the ByteBuff by the given length.
* @param dst the byte[] to which the ByteBuff's content is to be copied
* @param offset within the current array
* @param length upto which the bytes to be copied
*/
public abstract void get(byte[] dst, int offset, int length);
@ -242,13 +229,15 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/**
* Copies the content from this ByteBuff's current position to the byte array and fills it. Also
* advances the position of the ByteBuff by the length of the byte[]. n
* advances the position of the ByteBuff by the length of the byte[].
* @param dst the byte[] to which the ByteBuff's content is to be copied
*/
public abstract void get(byte[] dst);
/**
* Copies from the given byte[] to this ByteBuff n * @param offset the position in the byte array
* from which the copy should be done
* Copies from the given byte[] to this ByteBuff
* @param src source byte array
* @param offset the position in the byte array from which the copy should be done
* @param length the length upto which the copy should happen
* @return this ByteBuff
*/
@ -256,6 +245,8 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/**
* Copies from the given byte[] to this ByteBuff n * @return this ByteBuff
* @param src source byte array
* @return this ByteBuff
*/
public abstract ByteBuff put(byte[] src);
@ -270,8 +261,7 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/**
* Returns the short value at the current position. Also advances the position by the size of
* short
* @return the short value at the current position
* short.
*/
public abstract short getShort();
@ -291,43 +281,37 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
public abstract short getShortAfterPosition(int offset);
/**
* Returns the int value at the current position. Also advances the position by the size of int
* @return the int value at the current position
* Returns the int value at the current position. Also advances the position by the size of int.
*/
public abstract int getInt();
/**
* Writes an int to this ByteBuff at its current position. Also advances the position by size of
* int
* @param value Int value to write
* @return this object
* int.
*/
public abstract ByteBuff putInt(int value);
/**
* Fetches the int at the given index. Does not change position of the underlying ByteBuffers.
* Even if the current int does not fit in the current item we can safely move to the next item
* and fetch the remaining bytes forming the int n * @return the int value at the given index
* and fetch the remaining bytes forming the int.
*/
public abstract int getInt(int index);
/**
* Fetches the int value at the given offset from current position. Does not change position of
* the underlying ByteBuffers. n * @return the int value at the given index.
* the underlying ByteBuffers.
*/
public abstract int getIntAfterPosition(int offset);
/**
* Returns the long value at the current position. Also advances the position by the size of long
* @return the long value at the current position
* Returns the long value at the current position. Also advances the position by the size of long.
*/
public abstract long getLong();
/**
* Writes a long to this ByteBuff at its current position. Also advances the position by size of
* long
* @param value Long value to write
* @return this object
* long.
*/
public abstract ByteBuff putLong(long value);
@ -347,16 +331,13 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
/**
* Copy the content from this ByteBuff to a byte[].
* @return byte[] with the copied contents from this ByteBuff.
*/
public byte[] toBytes() {
return toBytes(0, this.limit());
}
/**
* Copy the content from this ByteBuff to a byte[] based on the given offset and length n * the
* position from where the copy should start n * the length upto which the copy has to be done
* @return byte[] with the copied contents from this ByteBuff.
* Copy the content from this ByteBuff to a byte[] based on the given offset and length.
*/
public abstract byte[] toBytes(int offset, int length);
@ -379,25 +360,16 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
*/
public abstract ByteBuff put(int offset, ByteBuff src, int srcOffset, int length);
/**
* Reads bytes from the given channel into this ByteBuff n * @return The number of bytes read from
* the channel n
*/
/** Reads bytes from the given channel into this ByteBuff. */
public abstract int read(ReadableByteChannel channel) throws IOException;
/**
* Reads bytes from FileChannel into this ByteBuff
*/
/** Reads bytes from FileChannel into this ByteBuff */
public abstract int read(FileChannel channel, long offset) throws IOException;
/**
* Write this ByteBuff's data into target file
*/
/** Write this ByteBuff's data into target file */
public abstract int write(FileChannel channel, long offset) throws IOException;
/**
* function interface for Channel read
*/
/** Functional interface for Channel read */
@FunctionalInterface
interface ChannelReader {
int read(ReadableByteChannel channel, ByteBuffer buf, long offset) throws IOException;
@ -438,10 +410,7 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
return (nBytes > 0) ? nBytes : ret;
}
/**
* Read integer from ByteBuff coded in 7 bits and increment position.
* @return Read integer.
*/
/** Read integer from ByteBuff coded in 7 bits and increment position. */
public static int readCompressedInt(ByteBuff buf) {
byte b = buf.get();
if ((b & ByteBufferUtils.NEXT_BIT_MASK) != 0) {
@ -560,9 +529,7 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
return refCnt;
}
/**
* Make this private because we don't want to expose the refCnt related wrap method to upstream.
*/
// Make this private because we don't want to expose the refCnt related wrap method to upstream.
private static ByteBuff wrap(List<ByteBuffer> buffers, RefCnt refCnt) {
if (buffers == null || buffers.size() == 0) {
throw new IllegalArgumentException("buffers shouldn't be null or empty");
@ -572,9 +539,7 @@ public abstract class ByteBuff implements HBaseReferenceCounted {
: new MultiByteBuff(refCnt, buffers.toArray(new ByteBuffer[0]));
}
/**
* Make this private because we don't want to expose the refCnt related wrap method to upstream.
*/
// Make this private because we don't want to expose the refCnt related wrap method to upstream.
private static ByteBuff wrap(ByteBuffer buffer, RefCnt refCnt) {
return new SingleByteBuff(refCnt, buffer);
}

View File

@ -578,9 +578,7 @@ public class MultiByteBuff extends ByteBuff {
/**
* Copies the specified number of bytes from this MBB's current position to the byte[]'s offset.
* Also advances the position of the MBB by the given length. n * @param offset within the current
* array
* @param length upto which the bytes to be copied
* Also advances the position of the MBB by the given length.
*/
@Override
public void get(byte[] dst, int offset, int length) {
@ -878,23 +876,16 @@ public class MultiByteBuff extends ByteBuff {
}
private static byte int0(int x) {
return (byte) (x);
return (byte) x;
}
/**
* Copies from the given byte[] to this MBB n * @return this MBB
*/
/** Copies from the given byte[] to this MBB */
@Override
public final MultiByteBuff put(byte[] src) {
return put(src, 0, src.length);
}
/**
* Copies from the given byte[] to this MBB n * @param offset the position in the byte array from
* which the copy should be done
* @param length the length upto which the copy should happen
* @return this MBB
*/
/** Copies from the given byte[] to this MBB. */
@Override
public MultiByteBuff put(byte[] src, int offset, int length) {
checkRefCount();
@ -965,7 +956,7 @@ public class MultiByteBuff extends ByteBuff {
}
private static byte long0(long x) {
return (byte) (x);
return (byte) x;
}
/**

View File

@ -64,6 +64,7 @@ public class RSGroupInfo {
}
/**
* Constructor
* @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information for a table will be
* stored in the configuration of a table so this will be removed.
*/
@ -79,52 +80,37 @@ public class RSGroupInfo {
this(src.name, src.servers, src.tables);
}
/**
* Get group name.
*/
/** Get group name. */
public String getName() {
return name;
}
/**
* Adds the given server to the group.
*/
/** Adds the given server to the group. */
public void addServer(Address hostPort) {
servers.add(hostPort);
}
/**
* Adds the given servers to the group.
*/
/** Adds the given servers to the group. */
public void addAllServers(Collection<Address> hostPort) {
servers.addAll(hostPort);
}
/**
* @param hostPort hostPort of the server
* @return true, if a server with hostPort is found
*/
/** Returns true if a server with hostPort is found */
public boolean containsServer(Address hostPort) {
return servers.contains(hostPort);
}
/**
* Get list of servers.
*/
/** Get list of servers. */
public Set<Address> getServers() {
return servers;
}
/**
* Remove given server from the group.
*/
/** Remove given server from the group. */
public boolean removeServer(Address hostPort) {
return servers.remove(hostPort);
}
/**
* Getter for fetching an unmodifiable {@link #configuration} map.
*/
/** Getter for fetching an unmodifiable {@link #configuration} map. */
public Map<String, String> getConfiguration() {
// shallow pointer copy
return Collections.unmodifiableMap(configuration);
@ -139,9 +125,7 @@ public class RSGroupInfo {
configuration.put(key, Objects.requireNonNull(value));
}
/**
* Remove a config setting represented by the key from the {@link #configuration} map
*/
/** Remove a config setting represented by the key from the {@link #configuration} map */
public void removeConfiguration(final String key) {
configuration.remove(key);
}
@ -157,6 +141,7 @@ public class RSGroupInfo {
}
/**
* Add a table
* @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in
* the configuration of a table so this will be removed.
*/
@ -166,6 +151,7 @@ public class RSGroupInfo {
}
/**
* Add a collection of tables
* @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in
* the configuration of a table so this will be removed.
*/
@ -175,6 +161,7 @@ public class RSGroupInfo {
}
/**
* Check if the group contains a table
* @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in
* the configuration of a table so this will be removed.
*/
@ -184,6 +171,7 @@ public class RSGroupInfo {
}
/**
* Remove a table
* @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in
* the configuration of a table so this will be removed.
*/
@ -215,12 +203,10 @@ public class RSGroupInfo {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
if (!(o instanceof RSGroupInfo)) {
return false;
}
RSGroupInfo rsGroupInfo = (RSGroupInfo) o;
if (!name.equals(rsGroupInfo.name)) {
return false;
}
@ -233,7 +219,6 @@ public class RSGroupInfo {
if (!configuration.equals(rsGroupInfo.configuration)) {
return false;
}
return true;
}
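A small sketch of the equals() convention this hunk (and the similar User change further below) moves to; Point is an invented class, not part of HBase. instanceof naturally returns false for null, so the explicit null check can be dropped.

// Illustrative only: instanceof-based equals() tolerates null and subclasses,
// whereas a getClass() comparison rejects both; error-prone flags the mixed style.
public class Point {
  private final int x, y;

  Point(int x, int y) {
    this.x = x;
    this.y = y;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Point)) { // also covers o == null
      return false;
    }
    Point other = (Point) o;
    return x == other.x && y == other.y;
  }

  @Override
  public int hashCode() {
    return 31 * x + y;
  }
}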

View File

@ -78,6 +78,7 @@ public final class Superusers {
}
/**
* Check if the current user is a super user
* @return true if current user is a super user (whether as user running process, declared as
* individual superuser or member of supergroup), false otherwise.
* @param user to check
@ -104,6 +105,7 @@ public final class Superusers {
}
/**
* Check if the current user is a super user
* @return true if current user is a super user, false otherwise.
* @param user to check
*/

View File

@ -139,7 +139,7 @@ public abstract class User {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
if (!(o instanceof User)) {
return false;
}
return ugi.equals(((User) o).ugi);
@ -155,9 +155,7 @@ public abstract class User {
return ugi.toString();
}
/**
* Returns the {@code User} instance within current execution context.
*/
/** Returns the {@code User} instance within current execution context. */
public static User getCurrent() throws IOException {
User user = new SecureHadoopUser();
if (user.getUGI() == null) {
@ -166,9 +164,7 @@ public abstract class User {
return user;
}
/**
* Executes the given action as the login user n * @return the result of the action n
*/
/** Executes the given action as the login user */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <T> T runAsLoginUser(PrivilegedExceptionAction<T> action) throws IOException {
try {
@ -324,7 +320,10 @@ public abstract class User {
return ugi.doAs(action);
}
/** @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) */
/**
* Create a user for testing.
* @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[])
*/
public static User createUserForTesting(Configuration conf, String name, String[] groups) {
synchronized (UserProvider.class) {
if (!(UserProvider.groups instanceof TestingGroups)) {
@ -365,9 +364,7 @@ public abstract class User {
}
}
/**
* Returns the result of {@code UserGroupInformation.isSecurityEnabled()}.
*/
/** Returns the result of {@code UserGroupInformation.isSecurityEnabled()}. */
public static boolean isSecurityEnabled() {
return UserGroupInformation.isSecurityEnabled();
}

View File

@ -141,7 +141,7 @@ public class UserProvider extends BaseConfigurable {
}
/**
* @return the userName for the current logged-in user.
* Returns the userName for the current logged-in user.
* @throws IOException if the underlying user cannot be obtained
*/
public String getCurrentUserName() throws IOException {
@ -155,9 +155,9 @@ public class UserProvider extends BaseConfigurable {
}
/**
* @return whether or not Kerberos authentication is configured for Hadoop. For non-secure Hadoop,
* this always returns <code>false</code>. For secure Hadoop, it will return the value
* from {@code UserGroupInformation.isSecurityEnabled()}.
* Return whether or not Kerberos authentication is configured for Hadoop. For non-secure Hadoop,
* this always returns <code>false</code>. For secure Hadoop, it will return the value from
* {@code UserGroupInformation.isSecurityEnabled()}.
*/
public boolean isHadoopSecurityEnabled() {
return User.isSecurityEnabled();
@ -172,7 +172,7 @@ public class UserProvider extends BaseConfigurable {
}
/**
* @return the current user within the current execution context
* Return the current user within the current execution context
* @throws IOException if the user cannot be loaded
*/
public User getCurrent() throws IOException {

View File

@ -38,6 +38,7 @@ import org.apache.yetus.audience.InterfaceStability;
*/
@InterfaceAudience.Private
@InterfaceStability.Stable
@SuppressWarnings({ "unchecked", "rawtypes", "hiding", "TypeParameterShadowing" })
public class CopyOnWriteArrayMap<K, V> extends AbstractMap<K, V>
implements Map<K, V>, ConcurrentNavigableMap<K, V> {
private final Comparator<? super K> keyComparator;

View File

@ -35,6 +35,7 @@ public class Addressing {
public static final String HOSTNAME_PORT_SEPARATOR = ":";
/**
* Create a socket address
* @param hostAndPort Formatted as <code>&lt;hostname&gt; ':' &lt;port&gt;</code>
* @return An InetSocketInstance
*/
@ -44,6 +45,7 @@ public class Addressing {
}
/**
* Create a host-and-port string
* @param hostname Server hostname
* @param port Server port
* @return Returns a concatenation of <code>hostname</code> and <code>port</code> in following
@ -56,6 +58,7 @@ public class Addressing {
}
/**
* Parse the hostname portion of a host-and-port string
* @param hostAndPort Formatted as <code>&lt;hostname&gt; ':' &lt;port&gt;</code>
* @return The hostname portion of <code>hostAndPort</code>
*/
@ -68,6 +71,7 @@ public class Addressing {
}
/**
* Parse the port portion of a host-and-port string
* @param hostAndPort Formatted as <code>&lt;hostname&gt; ':' &lt;port&gt;</code>
* @return The port portion of <code>hostAndPort</code>
*/
@ -163,9 +167,7 @@ public class Addressing {
: address.toString();
}
/**
* Interface for AddressSelectionCondition to check if address is acceptable
*/
/** Interface for AddressSelectionCondition to check if address is acceptable */
public interface AddressSelectionCondition {
/**
* Condition on which to accept inet address
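A brief usage sketch of the host-and-port helpers documented above, assuming org.apache.hadoop.hbase.util.Addressing is on the classpath; the hostname and port values are made up.

import org.apache.hadoop.hbase.util.Addressing;

public class AddressingDemo {
  public static void main(String[] args) {
    String hostAndPort = Addressing.createHostAndPortStr("regionserver-1.example.org", 16020);
    System.out.println(hostAndPort);                            // regionserver-1.example.org:16020
    System.out.println(Addressing.parseHostname(hostAndPort));  // regionserver-1.example.org
    System.out.println(Addressing.parsePort(hostAndPort));      // 16020
  }
}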

View File

@ -84,6 +84,7 @@ public final class AvlUtil {
@InterfaceAudience.Private
public static interface AvlNodeVisitor<TNode extends AvlNode> {
/**
* Visitor
* @param node the node that we are currently visiting
* @return false to stop the iteration. true to continue.
*/
@ -96,6 +97,7 @@ public final class AvlUtil {
@InterfaceAudience.Private
public static class AvlTree {
/**
* Return the node that matches the specified key or null in case of node not found.
* @param root the current root of the tree
* @param key the key for the node we are trying to find
* @param keyComparator the comparator to use to match node and key
@ -117,6 +119,7 @@ public final class AvlUtil {
}
/**
* Return the first node of the tree.
* @param root the current root of the tree
* @return the first (min) node of the tree
*/
@ -130,6 +133,7 @@ public final class AvlUtil {
}
/**
* Return the last node of the tree.
* @param root the current root of the tree
* @return the last (max) node of the tree
*/
@ -325,6 +329,7 @@ public final class AvlUtil {
/**
* Create the iterator starting from the first (min) node of the tree
* @param root the current root of the tree
*/
public AvlTreeIterator(final TNode root) {
seekFirst(root);
@ -448,6 +453,7 @@ public final class AvlUtil {
@InterfaceAudience.Private
public static class AvlIterableList {
/**
* Return the successor of the current node
* @param node the current node
* @return the successor of the current node
*/
@ -456,6 +462,7 @@ public final class AvlUtil {
}
/**
* Return the predecessor of the current node
* @param node the current node
* @return the predecessor of the current node
*/
@ -464,6 +471,7 @@ public final class AvlUtil {
}
/**
* Prepend a node to the list
* @param head the head of the linked list
* @param node the node to add to the front of the list
* @return the new head of the list
@ -484,6 +492,7 @@ public final class AvlUtil {
}
/**
* Append a node to the list
* @param head the head of the linked list
* @param node the node to add to the tail of the list
* @return the new head of the list
@ -504,6 +513,7 @@ public final class AvlUtil {
}
/**
* Append a list of nodes to the current list
* @param head the head of the current linked list
* @param otherHead the head of the list to append to the current list
* @return the new head of the current list
@ -522,6 +532,7 @@ public final class AvlUtil {
}
/**
* Remove a node from the list
* @param head the head of the linked list
* @param node the node to remove from the list
* @return the new head of the list
@ -541,6 +552,7 @@ public final class AvlUtil {
}
/**
* Prepend a node to the list before a specific node
* @param head the head of the linked list
* @param base the node which we want to add the {@code node} before it
* @param node the node which we want to add it before the {@code base} node
@ -554,10 +566,7 @@ public final class AvlUtil {
return head == base ? node : head;
}
/**
* @param node the node to check
* @return true if the node is linked to a list, false otherwise
*/
/** Return true if the node is linked to a list, false otherwise */
public static <TNode extends AvlLinkedNode> boolean isLinked(TNode node) {
return node.iterPrev != null && node.iterNext != null;
}

View File

@ -143,6 +143,7 @@ public class ByteBufferArray {
* Transfer bytes from source {@link ByteBuff} to destination {@link ByteBuffer}. Position of both
* source and destination will be advanced.
*/
@SuppressWarnings("UnnecessaryLambda")
private static final BiConsumer<ByteBuffer, ByteBuff> WRITER = (dst, src) -> {
int off = src.position(), len = dst.remaining();
src.get(dst, off, len);
@ -153,6 +154,7 @@ public class ByteBufferArray {
* Transfer bytes from source {@link ByteBuffer} to destination {@link ByteBuff}, Position of both
* source and destination will be advanced.
*/
@SuppressWarnings("UnnecessaryLambda")
private static final BiConsumer<ByteBuffer, ByteBuff> READER = (src, dst) -> {
int off = dst.position(), len = src.remaining(), srcOff = src.position();
dst.put(off, ByteBuff.wrap(src), srcOff, len);

View File

@ -58,6 +58,7 @@ import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUti
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
justification = "It has been like this forever")
@SuppressWarnings("MixedMutabilityReturnType")
public class Bytes implements Comparable<Bytes> {
// Using the charset canonical name for String/byte[] conversions is much
@ -186,16 +187,12 @@ public class Bytes implements Comparable<Bytes> {
return this.bytes;
}
/**
* @param b Use passed bytes as backing array for this instance.
*/
/** Use passed bytes as backing array for this instance. */
public void set(final byte[] b) {
set(b, 0, b.length);
}
/**
* @param b Use passed bytes as backing array for this instance. nn
*/
/** Use passed bytes as backing array for this instance. */
public void set(final byte[] b, final int offset, final int length) {
this.bytes = b;
this.offset = offset;
@ -211,9 +208,7 @@ public class Bytes implements Comparable<Bytes> {
return this.length;
}
/**
* n
*/
/** Return the offset into the buffer. */
public int getOffset() {
return this.offset;
}
@ -243,9 +238,6 @@ public class Bytes implements Comparable<Bytes> {
return BYTES_RAWCOMPARATOR.compare(this.bytes, this.offset, this.length, that, 0, that.length);
}
/**
* @see Object#equals(Object)
*/
@Override
public boolean equals(Object right_obj) {
if (right_obj instanceof byte[]) {
@ -257,15 +249,13 @@ public class Bytes implements Comparable<Bytes> {
return false;
}
/**
* @see Object#toString()
*/
@Override
public String toString() {
return Bytes.toString(bytes, offset, length);
}
/**
* Convert a list of byte[] to an array
* @param array List of byte [].
* @return Array of byte [].
*/
@ -278,21 +268,15 @@ public class Bytes implements Comparable<Bytes> {
return results;
}
/**
* Returns a copy of the bytes referred to by this writable
*/
/** Returns a copy of the bytes referred to by this writable */
public byte[] copyBytes() {
return Arrays.copyOfRange(bytes, offset, offset + length);
}
/**
* Byte array comparator class.
*/
/** Byte array comparator class. */
@InterfaceAudience.Public
public static class ByteArrayComparator implements RawComparator<byte[]> {
/**
* Constructor
*/
public ByteArrayComparator() {
super();
}
@ -338,14 +322,10 @@ public class Bytes implements Comparable<Bytes> {
}
}
/**
* Pass this to TreeMaps where byte [] are keys.
*/
/** Pass this to TreeMaps where byte [] are keys. */
public final static Comparator<byte[]> BYTES_COMPARATOR = new ByteArrayComparator();
/**
* Use comparing byte arrays, byte-by-byte
*/
/** Use comparing byte arrays, byte-by-byte */
public final static RawComparator<byte[]> BYTES_RAWCOMPARATOR = new ByteArrayComparator();
/**
@ -485,6 +465,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Convert a byte[] into a string. Charset is assumed to be UTF-8.
* @param b Presumed UTF-8 encoded byte array.
* @return String made from <code>b</code>
*/
@ -768,7 +749,8 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Presumes float encoded as IEEE 754 floating-point "single format"
* Return the float value parsed from the byte array. Presumes float encoded as IEEE 754
* floating-point "single format"
* @param bytes byte array
* @return Float made from passed byte array.
*/
@ -777,7 +759,8 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Presumes float encoded as IEEE 754 floating-point "single format"
* Return the float value parsed from the byte array at the given offset. Presumes float encoded
* as IEEE 754 floating-point "single format"
* @param bytes array to convert
* @param offset offset into array
* @return Float made from passed byte array.
@ -787,6 +770,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Put a float value out to the specified byte array position.
* @param bytes byte array
* @param offset offset to write to
* @param f float value
@ -796,33 +780,24 @@ public class Bytes implements Comparable<Bytes> {
return putInt(bytes, offset, Float.floatToRawIntBits(f));
}
/**
* @param f float value
* @return the float represented as byte []
*/
/** Return the float represented as byte[] */
public static byte[] toBytes(final float f) {
// Encode it as int
return Bytes.toBytes(Float.floatToRawIntBits(f));
}
/**
* @param bytes byte array
* @return Return double made from passed bytes.
*/
/** Return double made from passed bytes. */
public static double toDouble(final byte[] bytes) {
return toDouble(bytes, 0);
}
/**
* @param bytes byte array
* @param offset offset where double is
* @return Return double made from passed bytes.
*/
/** Return double made from passed bytes. */
public static double toDouble(final byte[] bytes, final int offset) {
return Double.longBitsToDouble(toLong(bytes, offset, SIZEOF_LONG));
}
/**
* Put a double value out to the specified byte array position as the IEEE 754 double format.
* @param bytes byte array
* @param offset offset to write to
* @param d value
@ -1031,9 +1006,7 @@ public class Bytes implements Comparable<Bytes> {
return offset + SIZEOF_SHORT;
}
/**
* Convert a BigDecimal value to a byte array n * @return the byte array
*/
/** Convert a BigDecimal value to a byte array */
public static byte[] toBytes(BigDecimal val) {
byte[] valueBytes = val.unscaledValue().toByteArray();
byte[] result = new byte[valueBytes.length + SIZEOF_INT];
@ -1042,16 +1015,12 @@ public class Bytes implements Comparable<Bytes> {
return result;
}
/**
* Converts a byte array to a BigDecimal n * @return the char value
*/
/** Converts a byte array to a BigDecimal */
public static BigDecimal toBigDecimal(byte[] bytes) {
return toBigDecimal(bytes, 0, bytes.length);
}
/**
* Converts a byte array to a BigDecimal value nnn * @return the char value
*/
/** Converts a byte array to a BigDecimal value */
public static BigDecimal toBigDecimal(byte[] bytes, int offset, final int length) {
if (bytes == null || length < SIZEOF_INT + 1 || (offset + length > bytes.length)) {
return null;
@ -1082,6 +1051,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Encode a long value as a variable length integer.
* @param vint Integer to make a vint of.
* @return Vint as bytes array.
*/
@ -1120,6 +1090,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Reads a zero-compressed encoded long from input buffer and returns it.
* @param buffer buffer to convert
* @return vint bytes as an integer.
*/
@ -1161,6 +1132,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Lexicographically compare two arrays.
* @param left left operand
* @param right right operand
* @return 0 if equal, &lt; 0 if left is less than right, etc.
@ -1480,6 +1452,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Lexicographically determine the equality of two arrays.
* @param left left operand
* @param right right operand
* @return True if equal
@ -1500,6 +1473,16 @@ public class Bytes implements Comparable<Bytes> {
return compareTo(left, right) == 0;
}
/**
* Lexicographically determine the equality of two arrays.
* @param left left operand
* @param leftOffset offset into left operand
* @param leftLen length of left operand
* @param right right operand
* @param rightOffset offset into right operand
* @param rightLen length of right operand
* @return True if equal
*/
public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right,
int rightOffset, int rightLen) {
// short circuit case
@ -1524,6 +1507,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Lexicographically determine the equality of two byte[], one as ByteBuffer.
* @param a left operand
* @param buf right operand
* @return True if equal
@ -1553,6 +1537,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Calculate a hash code from a given byte array.
* @param b bytes to hash
* @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the passed in array. This
* method is what {@link org.apache.hadoop.io.Text} use calculating hash code.
@ -1562,6 +1547,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Calculate a hash code from a given byte array.
* @param b value
* @param length length of the value
* @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the passed in array. This
@ -1572,6 +1558,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Calculate a hash code from a given byte array suitable for use as a key in maps.
* @param b bytes to hash
* @return A hash of <code>b</code> as an Integer that can be used as key in Maps.
*/
@ -1580,6 +1567,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Calculate a hash code from a given byte array suitable for use as a key in maps.
* @param b bytes to hash
* @param length length to hash
* @return A hash of <code>b</code> as an Integer that can be used as key in Maps.
@ -1589,6 +1577,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Concatenate byte arrays.
* @param a lower half
* @param b upper half
* @return New array that has a in lower half and b in upper half.
@ -1598,6 +1587,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Concatenate byte arrays.
* @param a first third
* @param b second third
* @param c third third
@ -1612,6 +1602,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Concatenate byte arrays.
* @param arrays all the arrays to concatenate together.
* @return New array made from the concatenation of the given arrays.
*/
@ -1630,6 +1621,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Make a new byte array from a subset of bytes at the head of another.
* @param a array
* @param length amount of bytes to grab
* @return First <code>length</code> bytes from <code>a</code>
@ -1644,6 +1636,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Make a new byte array from a subset of bytes at the tail of another.
* @param a array
* @param length amount of bytes to snarf
* @return Last <code>length</code> bytes from <code>a</code>
@ -1658,6 +1651,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Make a new byte array from a subset of bytes at the head of another, zero padded as desired.
* @param a array
* @param length new array size
* @return Value in <code>a</code> plus <code>length</code> prepended 0 bytes
@ -1671,6 +1665,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Make a new byte array from a subset of bytes at the tail of another, zero padded as desired.
* @param a array
* @param length new array size
* @return Value in <code>a</code> plus <code>length</code> appended 0 bytes
@ -1816,6 +1811,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Calculate the hash code for a given range of bytes.
* @param bytes array to hash
* @param offset offset to start from
* @param length length to hash
@ -1828,6 +1824,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Create an array of byte[] given an array of String.
* @param t operands
* @return Array of byte arrays made from passed array of Text
*/
@ -1840,6 +1837,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Create an array of byte[] given an array of String.
* @param t operands
* @return Array of binary byte arrays made from passed array of binary strings
*/
@ -1852,6 +1850,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Create a byte[][] where first and only entry is <code>column</code>
* @param column operand
* @return A byte array of a byte array where first and only entry is <code>column</code>
*/
@ -1860,6 +1859,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Create a byte[][] where first and only entry is <code>column</code>
* @param column operand
* @return A byte array of a byte array where first and only entry is <code>column</code>
*/
@ -1995,7 +1995,7 @@ public class Bytes implements Comparable<Bytes> {
for (int i = 0; i < value.length; i++) {
int cur = ((int) amo % 256) * sign;
amo = (amo >> 8);
int val = ((~value[value.length - i - 1]) & 0x0ff) + 1;
int val = (~value[value.length - i - 1] & 0x0ff) + 1;
int total = cur - val;
if (total >= 0) {
amo += sign;
@ -2208,6 +2208,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Return true if target is present as an element anywhere in the given array.
* @param array an array of {@code byte} values, possibly empty
* @param target a primitive {@code byte} value
* @return {@code true} if {@code target} is present as an element anywhere in {@code array}.
@ -2217,6 +2218,7 @@ public class Bytes implements Comparable<Bytes> {
}
/**
* Return true if target is present as an element anywhere in the given array.
* @param array an array of {@code byte} values, possibly empty
* @param target an array of {@code byte}
* @return {@code true} if {@code target} is present anywhere in {@code array}
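A short usage sketch of the Bytes conversions documented in the hunks above (round-tripping float, double and BigDecimal, plus region-wise equality); it assumes org.apache.hadoop.hbase.util.Bytes is on the classpath and the sample values are invented.

import java.math.BigDecimal;
import org.apache.hadoop.hbase.util.Bytes;

public class BytesRoundTrip {
  public static void main(String[] args) {
    byte[] f = Bytes.toBytes(3.14f);
    byte[] d = Bytes.toBytes(2.718d);
    byte[] bd = Bytes.toBytes(new BigDecimal("123.456"));

    System.out.println(Bytes.toFloat(f));        // 3.14
    System.out.println(Bytes.toDouble(d));       // 2.718
    System.out.println(Bytes.toBigDecimal(bd));  // 123.456

    // Region-wise equality without copying: compare the first 2 bytes of each array.
    System.out.println(Bytes.equals(f, 0, 2, d, 0, 2));
  }
}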

View File

@ -396,7 +396,7 @@ public class ClassSize {
* @return the size estimate, in bytes
*/
private static long estimateBaseFromCoefficients(int[] coeff, boolean debug) {
long prealign_size = OBJECT + coeff[0] + coeff[2] * REFERENCE;
long prealign_size = (long) OBJECT + coeff[0] + coeff[2] * REFERENCE;
// Round up to a multiple of 8
long size = align(prealign_size) + align(coeff[1] * ARRAY);
@ -429,7 +429,7 @@ public class ClassSize {
* @return smallest number &gt;= input that is a multiple of 8
*/
public static int align(int num) {
return (int) (align((long) num));
return (int) align((long) num);
}
/**
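A tiny demonstration (generic Java, not HBase code) of why the cast to long matters in the hunk above: without it the right-hand side is evaluated in 32-bit int arithmetic and can overflow before being widened into the long result.

public class IntOverflowDemo {
  public static void main(String[] args) {
    int a = Integer.MAX_VALUE;
    int b = 10;
    long wrong = a + b;          // int addition overflows, then widens: negative result
    long right = (long) a + b;   // long addition: correct result
    System.out.println(wrong);   // -2147483639
    System.out.println(right);   // 2147483657
  }
}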

View File

@ -75,7 +75,7 @@ public final class CommonFSUtils {
*/
public static boolean isStartingWithPath(final Path rootPath, final String path) {
String uriRootPath = rootPath.toUri().getPath();
String tailUriPath = (new Path(path)).toUri().getPath();
String tailUriPath = new Path(path).toUri().getPath();
return tailUriPath.startsWith(uriRootPath);
}
@ -280,6 +280,7 @@ public final class CommonFSUtils {
}
/**
* Get the path for the root data directory
* @param c configuration
* @return {@link Path} to hbase root directory from configuration as a qualified Path.
* @throws IOException e
@ -308,6 +309,7 @@ public final class CommonFSUtils {
}
/**
* Get the path for the root directory for WAL data
* @param c configuration
* @return {@link Path} to hbase log root directory: e.g. {@value HBASE_WAL_DIR} from
* configuration as a qualified Path. Defaults to HBase root dir.
@ -550,8 +552,7 @@ public final class CommonFSUtils {
}
/**
* @param conf must not be null
* @return True if this filesystem whose scheme is 'hdfs'.
* Return true if this is a filesystem whose scheme is 'hdfs'.
* @throws IOException from underlying FileSystem
*/
public static boolean isHDFS(final Configuration conf) throws IOException {
@ -570,8 +571,7 @@ public final class CommonFSUtils {
}
/**
* @param conf must not be null
* @return Returns the filesystem of the hbase rootdir.
* Returns the filesystem of the hbase rootdir.
* @throws IOException from underlying FileSystem
*/
public static FileSystem getCurrentFileSystem(Configuration conf) throws IOException {

View File

@ -43,6 +43,7 @@ public final class DNS {
private static Method GET_DEFAULT_HOST_METHOD;
/**
* Hostname configuration key
* @deprecated since 2.4.0 and will be removed in 4.0.0. Use {@link DNS#UNSAFE_RS_HOSTNAME_KEY}
* instead.
* @see <a href="https://issues.apache.org/jira/browse/HBASE-24667">HBASE-24667</a>
@ -65,9 +66,9 @@ public final class DNS {
MASTER("master"),
REGIONSERVER("regionserver");
private String name;
private final String name;
ServerType(String name) {
private ServerType(String name) {
this.name = name;
}

View File

@ -46,9 +46,7 @@ public final class ExceptionUtil {
return (t instanceof InterruptedIOException || t instanceof ClosedByInterruptException);
}
/**
* @throws InterruptedIOException if t was an interruption. Does nothing otherwise.
*/
/** Throw InterruptedIOException if t was an interruption, nothing otherwise. */
public static void rethrowIfInterrupt(Throwable t) throws InterruptedIOException {
InterruptedIOException iie = asInterrupt(t);

View File

@ -24,7 +24,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* exponentially. It brings benefits that it is more sensitive, and can see the trends easily.
*/
@InterfaceAudience.Private
public class ExponentialMovingAverage extends WindowMovingAverage {
public class ExponentialMovingAverage<T> extends WindowMovingAverage<T> {
private double alpha;
private double previousAverage;
private double currentAverage;

View File

@ -94,7 +94,6 @@ public abstract class Hash {
/**
* Calculate a hash using bytes from HashKey and the provided seed value.
* @param <T>
* @param hashKey key to extract the hash
* @param initval the seed value
* @return hash value

View File

@ -21,7 +21,6 @@ import org.apache.yetus.audience.InterfaceAudience;
/**
* Used to calculate the hash {@link Hash} algorithms for Bloomfilters.
* @param <T> the type of HashKey
*/
@InterfaceAudience.Private
public abstract class HashKey<T> {
@ -31,9 +30,7 @@ public abstract class HashKey<T> {
this.t = t;
}
/**
* n * @return The byte at the given position in this HashKey
*/
/** Return the byte at the given position in this HashKey */
public abstract byte get(int pos);
/** Returns The number of bytes in this HashKey */

View File

@ -27,12 +27,12 @@ import org.apache.hbase.thirdparty.com.google.common.escape.Escapers;
/**
* Utility class for converting objects to JRuby. It handles null, Boolean, Number, String, byte[],
* List&lt;Object>, Map&lt;String, Object> structures.
* List&lt;Object&gt;, Map&lt;String, Object&gt; structures.
* <p>
* E.g.
*
* <pre>
* Map&lt;String, Object> map = new LinkedHashMap&lt;>();
* Map&lt;String, Object&gt; map = new LinkedHashMap&lt;&gt;();
* map.put("null", null);
* map.put("boolean", true);
* map.put("number", 1);
@ -45,8 +45,8 @@ import org.apache.hbase.thirdparty.com.google.common.escape.Escapers;
* Calling {@link #print(Object)} method will result:
*
* <pre>
* { null => '', boolean => 'true', number => '1', string => 'str',
* binary => '010203', list => [ '1', '2', 'true' ] }
* { null =&gt; '', boolean =&gt; 'true', number =&gt; '1', string =&gt; 'str',
* binary =&gt; '010203', list =&gt; [ '1', '2', 'true' ] }
* </pre>
* </p>
*/

View File

@ -26,10 +26,13 @@ import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
/**
* This class is a wrapper for the implementation of com.sun.management.UnixOperatingSystemMXBean It
* will decide to use the sun api or its own implementation depending on the runtime (vendor) used.
@ -167,11 +170,10 @@ public class JVM {
// need to get the PID number of the process first
RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean();
String rtname = rtmbean.getName();
String[] pidhost = rtname.split("@");
Iterator<String> pidhost = Splitter.on('@').split(rtname).iterator();
// using linux bash commands to retrieve info
Process p = Runtime.getRuntime()
.exec(new String[] { "bash", "-c", "ls /proc/" + pidhost[0] + "/fdinfo | wc -l" });
.exec(new String[] { "bash", "-c", "ls /proc/" + pidhost.next() + "/fdinfo | wc -l" });
inputStream = p.getInputStream();
inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
bufferedReader = new BufferedReader(inputStreamReader);
@ -208,6 +210,7 @@ public class JVM {
}
/**
* Get the system load average
* @see java.lang.management.OperatingSystemMXBean#getSystemLoadAverage
*/
public double getSystemLoadAverage() {
@ -215,9 +218,9 @@ public class JVM {
}
/**
* @return the physical free memory (not the JVM one, as it's not very useful as it depends on the
* GC), but the one from the OS as it allows a little bit more to guess if the machine is
* overloaded or not).
* Return the physical free memory (not the JVM one, as it's not very useful as it depends on the
* GC), but the one from the OS, as it gives a better idea of whether the machine is overloaded or
* not.
*/
public long getFreeMemory() {
if (ibmvendor) {
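A self-contained sketch of the pid parsing shown in the JVM hunk above: the RuntimeMXBean name has the form "pid@hostname", and only the first '@'-separated token is needed, so an iterator avoids building an array. The sketch uses plain Guava's Splitter where HBase uses the relocated hbase-thirdparty copy.

import java.lang.management.ManagementFactory;
import java.util.Iterator;
import com.google.common.base.Splitter;

public class PidDemo {
  public static void main(String[] args) {
    String rtname = ManagementFactory.getRuntimeMXBean().getName(); // e.g. "12345@myhost"
    Iterator<String> pidhost = Splitter.on('@').split(rtname).iterator();
    System.out.println("pid = " + pidhost.next());
  }
}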

View File

@ -21,15 +21,12 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility class for MD5 MD5 hash produces a 128-bit digest.
*/
@InterfaceAudience.Public
public class MD5Hash {
private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class);
/**
* Given a byte array, returns in MD5 hash as a hex string. n * @return SHA1 hash as a 32

View File

@ -643,7 +643,6 @@ public class OrderedBytes {
byte[] a = dst.getBytes();
boolean isNeg = val.signum() == -1;
final int offset = dst.getOffset(), start = dst.getPosition();
int e = 0, startM;
if (isNeg) { /* Small negative number: 0x14, -E, ~M */
dst.put(NEG_SMALL);
@ -655,13 +654,13 @@ public class OrderedBytes {
int zerosBeforeFirstNonZero = abs.scale() - abs.precision();
int lengthToMoveRight =
zerosBeforeFirstNonZero % 2 == 0 ? zerosBeforeFirstNonZero : zerosBeforeFirstNonZero - 1;
e = lengthToMoveRight / 2;
int e = lengthToMoveRight / 2;
abs = abs.movePointRight(lengthToMoveRight);
putVaruint64(dst, e, !isNeg); // encode appropriate E value.
// encode M by peeling off centimal digits, encoding x as 2x+1
startM = dst.getPosition();
int startM = dst.getPosition();
encodeToCentimal(dst, abs);
// terminal digit should be 2x
a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
@ -695,8 +694,8 @@ public class OrderedBytes {
* calling function.
*
* <pre>
* Encoding: M (if E<=10)
* E M (if E>10)
* Encoding: M (if E&lt;=10)
* E M (if E&gt;10)
* </pre>
* </p>
* @param dst The destination to which encoded digits are written.
@ -709,7 +708,6 @@ public class OrderedBytes {
byte[] a = dst.getBytes();
boolean isNeg = val.signum() == -1;
final int start = dst.getPosition(), offset = dst.getOffset();
int e = 0, startM;
if (isNeg) { /* Large negative number: 0x08, ~E, ~M */
dst.put(NEG_LARGE);
@ -720,7 +718,7 @@ public class OrderedBytes {
// normalize abs(val) to determine E
int integerDigits = abs.precision() - abs.scale();
int lengthToMoveLeft = integerDigits % 2 == 0 ? integerDigits : integerDigits + 1;
e = lengthToMoveLeft / 2;
int e = lengthToMoveLeft / 2;
abs = abs.movePointLeft(lengthToMoveLeft);
// encode appropriate header byte and/or E value.
@ -735,7 +733,7 @@ public class OrderedBytes {
}
// encode M by peeling off centimal digits, encoding x as 2x+1
startM = dst.getPosition();
int startM = dst.getPosition();
encodeToCentimal(dst, abs);
// terminal digit should be 2x
a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
@ -748,7 +746,7 @@ public class OrderedBytes {
/**
* Encode a value val in [0.01, 1.0) into Centimals. Util function for
* {@link OrderedBytes#encodeNumericLarge(PositionedByteRange, BigDecimal) and
* {@link OrderedBytes#encodeNumericLarge(PositionedByteRange, BigDecimal)} and
* {@link OrderedBytes#encodeNumericSmall(PositionedByteRange, BigDecimal)}
* @param dst The destination to which encoded digits are written.
* @param val A BigDecimal after the normalization. The value must be in [0.01, 1.0).
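A worked example (values invented) of the exponent normalization performed in encodeNumericLarge above: E counts base-100 (centimal) digits, so the decimal point is moved left by an even number of places until abs(val) lands in [0.01, 1.0).

import java.math.BigDecimal;

public class CentimalNormalizeDemo {
  public static void main(String[] args) {
    BigDecimal abs = new BigDecimal("12345.678");
    int integerDigits = abs.precision() - abs.scale();                               // 8 - 3 = 5
    int lengthToMoveLeft = integerDigits % 2 == 0 ? integerDigits : integerDigits + 1; // 6
    int e = lengthToMoveLeft / 2;                                                    // 3 centimal digits
    BigDecimal normalized = abs.movePointLeft(lengthToMoveLeft);                     // 0.012345678
    System.out.println("E=" + e + ", M=" + normalized);
  }
}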

View File

@ -22,8 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
/**
* A generic class for pairs.
* @param <T1>
* @param <T2>
*/
@InterfaceAudience.Public
public class Pair<T1, T2> implements Serializable {
@ -102,9 +100,13 @@ public class Pair<T1, T2> implements Serializable {
@Override
public int hashCode() {
if (first == null) return (second == null) ? 0 : second.hashCode() + 1;
else if (second == null) return first.hashCode() + 2;
else return first.hashCode() * 17 + second.hashCode();
if (first == null) {
return (second == null) ? 0 : second.hashCode() + 1;
} else if (second == null) {
return first.hashCode() + 2;
} else {
return first.hashCode() * 17 + second.hashCode();
}
}
@Override

View File

@ -24,7 +24,6 @@ import org.apache.yetus.audience.InterfaceAudience;
/**
* A generic, immutable class for pairs of objects both of type <code>T</code>.
* @param <T>
* @see Pair if Types differ.
*/
@InterfaceAudience.Public

View File

@ -34,7 +34,7 @@ import org.slf4j.Logger;
@InterfaceAudience.Private
public class ReflectionUtils {
@SuppressWarnings("unchecked")
@SuppressWarnings({ "unchecked", "TypeParameterUnusedInFormals" })
public static <T> T instantiateWithCustomCtor(String className, Class<?>[] ctorArgTypes,
Object[] ctorArgs) {
try {
@ -63,7 +63,7 @@ public class ReflectionUtils {
}
}
@SuppressWarnings("unchecked")
@SuppressWarnings({ "unchecked", "TypeParameterUnusedInFormals" })
public static <T> T newInstance(String className, Object... params) {
Class<T> type;
try {
@ -107,7 +107,7 @@ public class ReflectionUtils {
match = !ctorParamTypes[i].isPrimitive();
} else {
Class<?> paramType = paramTypes[i].getClass();
match = (!ctorParamTypes[i].isPrimitive())
match = !ctorParamTypes[i].isPrimitive()
? ctorParamTypes[i].isAssignableFrom(paramType)
: ((int.class.equals(ctorParamTypes[i]) && Integer.class.equals(paramType))
|| (long.class.equals(ctorParamTypes[i]) && Long.class.equals(paramType))

View File

@ -23,7 +23,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* SMA measure the overall average execution time of a specific method.
*/
@InterfaceAudience.Private
public class SimpleMovingAverage extends MovingAverage {
public class SimpleMovingAverage<T> extends MovingAverage<T> {
private double averageTime = 0.0;
protected long count = 0;
@ -35,7 +35,7 @@ public class SimpleMovingAverage extends MovingAverage {
@Override
public void updateMostRecentTime(long elapsed) {
averageTime += (elapsed - averageTime) / (++count);
averageTime += (elapsed - averageTime) / ++count;
}
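The updated line is the standard incremental mean, with the redundant parentheses around ++count dropped. Written out as a free-standing sketch (names are illustrative):
  // avg_n = avg_{n-1} + (x_n - avg_{n-1}) / n  -- the same arithmetic as updateMostRecentTime above
  static double nextAverage(double previousAverage, long previousCount, long newSample) {
    long n = previousCount + 1;
    return previousAverage + (newSample - previousAverage) / n;
  }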
@Override

View File

@ -101,9 +101,7 @@ public class Threads {
}
}
/**
* @param t Waits on the passed thread to die dumping a threaddump every minute while its up. n
*/
/** Waits on the passed thread to die, dumping a threaddump every minute while it's up. */
public static void threadDumpingIsAlive(final Thread t) throws InterruptedException {
if (t == null) {
return;

View File

@ -61,11 +61,11 @@ public class Triple<A, B, C> {
Triple<?, ?, ?> otherTriple = (Triple<?, ?, ?>) obj;
if (first != otherTriple.first && (first != null && !(first.equals(otherTriple.first))))
if (first != otherTriple.first && (first != null && !first.equals(otherTriple.first)))
return false;
if (second != otherTriple.second && (second != null && !(second.equals(otherTriple.second))))
if (second != otherTriple.second && (second != null && !second.equals(otherTriple.second)))
return false;
if (third != otherTriple.third && (third != null && !(third.equals(otherTriple.third))))
if (third != otherTriple.third && (third != null && !third.equals(otherTriple.third)))
return false;
return true;
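The change above only removes redundant parentheses. The null-safe comparisons could also be expressed with Objects.equals, shown here purely as a hedged alternative; it is not behavior-identical, since the existing check lets a null field match a non-null one while Objects.equals would not:
  // Hypothetical tightening, not what this patch does:
  return java.util.Objects.equals(first, otherTriple.first)
    && java.util.Objects.equals(second, otherTriple.second)
    && java.util.Objects.equals(third, otherTriple.third);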

View File

@ -19,12 +19,16 @@ package org.apache.hadoop.hbase.util;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.Version;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
/**
* This class finds the Version information for HBase.
*/
@ -137,9 +141,9 @@ public class VersionInfo {
*/
private static String[] getVersionComponents(final String version) {
assert (version != null);
String[] strComps = version.split("[\\.-]");
List<String> list = Splitter.onPattern("[\\.-]").splitToList(version);
String[] strComps = list.toArray(new String[list.size()]);
assert (strComps.length > 0);
String[] comps = new String[strComps.length];
for (int i = 0; i < strComps.length; ++i) {
if (StringUtils.isNumeric(strComps[i])) {
@ -162,7 +166,7 @@ public class VersionInfo {
}
public static int getMajorVersion(String version) {
return Integer.parseInt(version.split("\\.")[0]);
return Integer.parseInt(Iterables.get(Splitter.on('.').split(version), 0));
}
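The Splitter-based form behaves like the old String.split for well-formed versions while avoiding the surprising empty-string handling that the error-prone StringSplitter finding targets. A rough standalone comparison, assuming a version string such as "2.5.0-SNAPSHOT" and the shaded Splitter import added above:
  String version = "2.5.0-SNAPSHOT";
  String[] viaSplit = version.split("[\\.-]");                                  // ["2", "5", "0", "SNAPSHOT"]
  List<String> viaSplitter = Splitter.onPattern("[\\.-]").splitToList(version); // same four components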
public static void main(String[] args) {

View File

@ -24,7 +24,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* weight. And it is based on {@link WindowMovingAverage}, such that it only focus on the last N.
*/
@InterfaceAudience.Private
public class WeightedMovingAverage extends WindowMovingAverage {
public class WeightedMovingAverage<T> extends WindowMovingAverage<T> {
private int[] coefficient;
private int denominator;
@ -53,8 +53,8 @@ public class WeightedMovingAverage extends WindowMovingAverage {
int coIndex = 0;
int length = getNumberOfStatistics();
// tmIndex, it points to the oldest data.
for (int tmIndex = (getMostRecentPosistion() + 1) % length; coIndex
< length; coIndex++, tmIndex = (++tmIndex) % length) {
for (int tmIndex = (getMostRecentPosition() + 1) % length; coIndex
< length; coIndex++, tmIndex = ++tmIndex % length) {
// start the multiplication from oldest to newest
average += coefficient[coIndex] * getStatisticsAtIndex(tmIndex);
}

View File

@ -24,7 +24,7 @@ import org.apache.yetus.audience.InterfaceAudience;
* in a circle array.
*/
@InterfaceAudience.Private
public class WindowMovingAverage extends MovingAverage {
public class WindowMovingAverage<T> extends MovingAverage<T> {
protected final static int DEFAULT_SIZE = 5;
// The last n statistics.
@ -47,7 +47,7 @@ public class WindowMovingAverage extends MovingAverage {
@Override
protected void updateMostRecentTime(long elapsed) {
int index = moveForwardMostRecentPosistion();
int index = moveForwardMostRecentPosition();
lastN[index] = elapsed;
}
@ -55,7 +55,7 @@ public class WindowMovingAverage extends MovingAverage {
public double getAverageTime() {
return enoughStatistics()
? (double) sum(getNumberOfStatistics()) / getNumberOfStatistics()
: (double) sum(getMostRecentPosistion() + 1) / (getMostRecentPosistion() + 1);
: (double) sum(getMostRecentPosition() + 1) / (getMostRecentPosition() + 1);
}
/**
@ -84,7 +84,7 @@ public class WindowMovingAverage extends MovingAverage {
}
/** Returns index of most recent */
protected int getMostRecentPosistion() {
protected int getMostRecentPosition() {
return mostRecent;
}
@ -92,7 +92,7 @@ public class WindowMovingAverage extends MovingAverage {
* Move forward the most recent index.
* @return the most recent index
*/
protected int moveForwardMostRecentPosistion() {
protected int moveForwardMostRecentPosition() {
int index = ++mostRecent;
if (!oneRound && index == getNumberOfStatistics()) {
// Back to the head of the lastN, from now on will

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.zookeeper;
import java.io.IOException;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import org.apache.commons.validator.routines.InetAddressValidator;
@ -26,6 +27,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
/**
* Utility methods for reading, and building the ZooKeeper configuration. The order and priority for
* reading the config are as follows: (1). Property with "hbase.zookeeper.property." prefix from
@ -192,27 +195,28 @@ public final class ZKConfig {
* the described order n
*/
public static ZKClusterKey transformClusterKey(String key) throws IOException {
String[] parts = key.split(":");
List<String> parts = Splitter.on(':').splitToList(key);
String[] partsArray = parts.toArray(new String[parts.size()]);
if (parts.length == 3) {
if (!parts[2].matches("/.*[^/]")) {
if (partsArray.length == 3) {
if (!partsArray[2].matches("/.*[^/]")) {
throw new IOException("Cluster key passed " + key + " is invalid, the format should be:"
+ HConstants.ZOOKEEPER_QUORUM + ":" + HConstants.ZOOKEEPER_CLIENT_PORT + ":"
+ HConstants.ZOOKEEPER_ZNODE_PARENT);
}
return new ZKClusterKey(parts[0], Integer.parseInt(parts[1]), parts[2]);
return new ZKClusterKey(partsArray[0], Integer.parseInt(partsArray[1]), partsArray[2]);
}
if (parts.length > 3) {
if (partsArray.length > 3) {
// The quorum could contain client port in server:clientport format, try to transform more.
String zNodeParent = parts[parts.length - 1];
String zNodeParent = partsArray[partsArray.length - 1];
if (!zNodeParent.matches("/.*[^/]")) {
throw new IOException("Cluster key passed " + key + " is invalid, the format should be:"
+ HConstants.ZOOKEEPER_QUORUM + ":" + HConstants.ZOOKEEPER_CLIENT_PORT + ":"
+ HConstants.ZOOKEEPER_ZNODE_PARENT);
}
String clientPort = parts[parts.length - 2];
String clientPort = partsArray[partsArray.length - 2];
// The first part length is the total length minus the lengths of other parts and minus 2 ":"
int endQuorumIndex = key.length() - zNodeParent.length() - clientPort.length() - 2;
@ -222,7 +226,7 @@ public final class ZKConfig {
// The common case is that every server has its own client port specified - this means
// that (total parts - the ZNodeParent part - the ClientPort part) is equal to
// (the number of "," + 1) - "+ 1" because the last server has no ",".
if ((parts.length - 2) == (serverHosts.length + 1)) {
if ((partsArray.length - 2) == (serverHosts.length + 1)) {
return new ZKClusterKey(quorumStringInput, Integer.parseInt(clientPort), zNodeParent);
}
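As a worked example of the parsing above (host names are illustrative), a standard three-part cluster key splits into quorum, client port and znode parent, while a quorum that embeds per-server ports produces more than three parts and takes the second branch:
  // "host1,host2,host3:2181:/hbase" -> quorum="host1,host2,host3", port=2181, parent="/hbase"
  List<String> parts = Splitter.on(':').splitToList("host1,host2,host3:2181:/hbase");
  // parts = ["host1,host2,host3", "2181", "/hbase"]
  // "host1:2181,host2:2181:/hbase" splits into four parts, so partsArray.length > 3 applies.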

View File

@ -25,7 +25,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentSkipListMap;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@ -81,11 +80,18 @@ public class TestByteBufferKeyValue {
map.put((ByteBufferKeyValue) cell2, (ByteBufferKeyValue) cell2);
map.put((ByteBufferKeyValue) cell3, (ByteBufferKeyValue) cell3);
map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell1);
map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell1);
map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell4);
assertEquals(3, map.size());
assertTrue(map.containsKey(cell1));
assertTrue(map.containsKey(cell2));
assertTrue(map.containsKey(cell3));
assertEquals(cell4, map.get(cell1));
assertEquals(cell2, map.get(cell2));
assertEquals(cell3, map.get(cell3));
}
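The extra assertions rely on standard sorted-map semantics: a put whose key the map's comparator treats as equal replaces only the mapped value and keeps the originally stored key. Illustrated with an unrelated comparator for clarity:
  ConcurrentSkipListMap<String, String> m = new ConcurrentSkipListMap<>(String.CASE_INSENSITIVE_ORDER);
  m.put("a", "first");
  m.put("A", "second"); // "A" compares equal to "a"
  // m.size() == 1, m.get("a") returns "second", and the stored key is still "a"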
private static Cell getOffheapCell(byte[] row, byte[] family, byte[] qualifier) {
KeyValue kvCell = new KeyValue(row, family, qualifier, 0L, Type.Put, row);
KeyValue kvCell = new KeyValue(row, family, qualifier, 0L, KeyValue.Type.Put, row);
ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
return new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
@ -93,7 +99,7 @@ public class TestByteBufferKeyValue {
@Test
public void testByteBufferBackedKeyValue() throws Exception {
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1);
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1);
ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
ByteBufferExtendedCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
@ -106,7 +112,7 @@ public class TestByteBufferKeyValue {
assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(),
offheapKV.getValuePosition(), offheapKV.getValueLength()));
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte());
// Use the array() APIs
assertEquals(ROW1, Bytes.toStringBinary(offheapKV.getRowArray(), offheapKV.getRowOffset(),
@ -118,9 +124,9 @@ public class TestByteBufferKeyValue {
assertEquals(ROW1, Bytes.toStringBinary(offheapKV.getValueArray(), offheapKV.getValueOffset(),
offheapKV.getValueLength()));
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte());
kvCell = new KeyValue(row1, fam2, qual2, 0L, Type.Put, row1);
kvCell = new KeyValue(row1, fam2, qual2, 0L, KeyValue.Type.Put, row1);
buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
@ -129,7 +135,7 @@ public class TestByteBufferKeyValue {
assertEquals(QUAL2, ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(),
offheapKV.getQualifierPosition(), offheapKV.getQualifierLength()));
byte[] nullQualifier = new byte[0];
kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, Type.Put, row1);
kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, KeyValue.Type.Put, row1);
buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
@ -142,12 +148,12 @@ public class TestByteBufferKeyValue {
assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(),
offheapKV.getValuePosition(), offheapKV.getValueLength()));
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte());
}
@Test
public void testByteBufferBackedKeyValueWithTags() throws Exception {
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags);
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1, tags);
ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
@ -160,7 +166,7 @@ public class TestByteBufferKeyValue {
assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(),
offheapKV.getValuePosition(), offheapKV.getValueLength()));
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte());
// change tags to handle both onheap and offheap stuff
List<Tag> resTags = PrivateCellUtil.getTags(offheapKV);
Tag tag1 = resTags.get(0);
@ -169,14 +175,14 @@ public class TestByteBufferKeyValue {
Tag tag2 = resTags.get(1);
assertEquals(tag2.getType(), tag2.getType());
assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag2));
Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2).get();
assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag2));
Tag tag3 = PrivateCellUtil.getTag(offheapKV, (byte) 2).get();
assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag3));
assertFalse(PrivateCellUtil.getTag(offheapKV, (byte) 3).isPresent());
}
@Test
public void testGetKeyMethods() throws Exception {
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags);
KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1, tags);
ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getKeyLength());
ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), kvCell.getKeyOffset(),
kvCell.getKeyLength());
@ -188,6 +194,6 @@ public class TestByteBufferKeyValue {
assertEquals(QUAL1, ByteBufferUtils.toStringBinary(offheapKeyOnlyKV.getQualifierByteBuffer(),
offheapKeyOnlyKV.getQualifierPosition(), offheapKeyOnlyKV.getQualifierLength()));
assertEquals(0L, offheapKeyOnlyKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte());
assertEquals(KeyValue.Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte());
}
}

View File

@ -24,7 +24,6 @@ import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@ -58,51 +57,46 @@ public class TestCellComparator {
public void testCompareCells() {
KeyValue kv1 = new KeyValue(row1, fam1, qual1, val);
KeyValue kv2 = new KeyValue(row2, fam1, qual1, val);
assertTrue((comparator.compare(kv1, kv2)) < 0);
assertTrue(comparator.compare(kv1, kv2) < 0);
kv1 = new KeyValue(row1, fam2, qual1, val);
kv2 = new KeyValue(row1, fam1, qual1, val);
assertTrue((comparator.compareFamilies(kv1, kv2) > 0));
assertTrue(comparator.compareFamilies(kv1, kv2) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, val);
kv2 = new KeyValue(row1, fam1, qual1, 2L, val);
assertTrue((comparator.compare(kv1, kv2) > 0));
assertTrue(comparator.compare(kv1, kv2) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum);
assertTrue((comparator.compare(kv1, kv2) > 0));
kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Maximum);
assertTrue(comparator.compare(kv1, kv2) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
assertTrue((CellUtil.equals(kv1, kv2)));
kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
assertTrue(CellUtil.equals(kv1, kv2));
}
@Test
public void testCompareCellWithKey() throws Exception {
KeyValue kv1 = new KeyValue(row1, fam1, qual1, val);
KeyValue kv2 = new KeyValue(row2, fam1, qual1, val);
assertTrue(
(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) < 0);
assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) < 0);
kv1 = new KeyValue(row1, fam2, qual1, val);
kv2 = new KeyValue(row1, fam1, qual1, val);
assertTrue(
(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0);
assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, val);
kv2 = new KeyValue(row1, fam1, qual1, 2L, val);
assertTrue(
(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0);
assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum);
assertTrue(
(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Maximum);
assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
assertTrue(
(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) == 0);
kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put);
assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) == 0);
}
@Test
@ -234,13 +228,10 @@ public class TestCellComparator {
// This will output the keys incorrectly.
boolean assertion = false;
int count = 0;
try {
for (Cell k : set) {
assertTrue("count=" + count + ", " + k.toString(), count++ == k.getTimestamp());
for (Cell k : set) {
if (!(count++ == k.getTimestamp())) {
assertion = true;
}
} catch (AssertionError e) {
// Expected
assertion = true;
}
assertTrue(assertion);
// Make set with good comparator

View File

@ -524,28 +524,27 @@ public class TestCellUtil {
byte[] r = Bytes.toBytes("row1");
byte[] f = Bytes.toBytes("cf1");
byte[] q1 = Bytes.toBytes("qual1");
byte[] q2 = Bytes.toBytes("qual2");
byte[] v = Bytes.toBytes("val1");
byte[] tags = Bytes.toBytes("tag1");
KeyValue kv =
new KeyValue(r, f, q1, 0, q1.length, 1234L, KeyValue.Type.Put, v, 0, v.length, tags);
NonExtendedCell nonExtCell = new NonExtendedCell(kv);
ByteArrayOutputStream os = new ByteArrayOutputStream();
int writeCell = PrivateCellUtil.writeCell(nonExtCell, os, true);
PrivateCellUtil.writeCell(nonExtCell, os, true);
byte[] byteArray = os.toByteArray();
KeyValue res = new KeyValue(byteArray);
assertTrue(CellUtil.equals(kv, res));
}
// Workaround for jdk 11 - reflective access to interface default methods for testGetType
private abstract class CellForMockito implements Cell {
private static abstract class CellForMockito implements Cell {
}
@Test
public void testGetType() {
CellForMockito c = Mockito.mock(CellForMockito.class);
Mockito.when(c.getType()).thenCallRealMethod();
for (CellForMockito.Type type : CellForMockito.Type.values()) {
for (Cell.Type type : Cell.Type.values()) {
Mockito.when(c.getTypeByte()).thenReturn(type.getCode());
assertEquals(type, c.getType());
}

View File

@ -398,6 +398,7 @@ public class TestClassFinder {
super(urls, parentLoader);
}
@Override
public void addURL(URL url) {
super.addURL(url);
}

View File

@ -59,6 +59,7 @@ public class TestHBaseClassTestRule {
}
@RunWith(Parameterized.class)
@SuppressWarnings("UnusedMethod")
private static class InValidParameterizedClass {
// Not valid because parameters method is private.
@Parameters

View File

@ -216,9 +216,6 @@ public class TestHBaseConfiguration {
getCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class);
Method getAliasesMethod =
loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
createCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME, String.class, char[].class);

View File

@ -178,10 +178,11 @@ public class TestIndividualBytesFieldCell {
assertEquals(kv1.getTagsLength(), ic1.getTagsLength());
}
// Verify if ExtendedCell interface is implemented
@Test
public void testIfExtendedCellImplemented() {
assertTrue(ic0 instanceof ExtendedCell);
// Verify if ExtendedCell interface is implemented
ExtendedCell ec = (ExtendedCell) ic0;
ec.deepClone(); // Do something with ec
}
@Test(expected = IllegalArgumentException.class)

View File

@ -242,13 +242,10 @@ public class TestKeyValue {
// This will output the keys incorrectly.
boolean assertion = false;
int count = 0;
try {
for (KeyValue k : set) {
assertEquals(count++, k.getTimestamp());
for (KeyValue k : set) {
if (count++ != k.getTimestamp()) {
assertion = true;
}
} catch (java.lang.AssertionError e) {
// Expected
assertion = true;
}
assertTrue(assertion);
// Make set with good comparator
@ -323,7 +320,6 @@ public class TestKeyValue {
* |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|....
* ------------------|-------commonLength--------|--------------
*/
int commonLength = KeyValue.ROW_LENGTH_SIZE + KeyValue.FAMILY_LENGTH_SIZE + row.length;
// 'fa:' < 'fami:'. They have commonPrefix + 2 same prefix bytes.
assertKVLessWithoutRow(c, kv_0, kv0_0);
// 'fami:' < 'fami:qf1'. They have commonPrefix + 4 same prefix bytes.

View File

@ -82,14 +82,14 @@ public class TestServerName {
assertEquals(sn.toString(), parsedSn.toString());
assertEquals(sn.getHostnameLowerCase(), parsedSn.getHostnameLowerCase());
assertEquals(sn.getPort(), parsedSn.getPort());
assertEquals(sn.getStartcode(), parsedSn.getStartcode());
assertEquals(sn.getStartCode(), parsedSn.getStartCode());
final String hostnamePortStr = sn.getAddress().toString();
byte[] bytes = Bytes.toBytes(hostnamePortStr);
parsedSn = ServerName.parseVersionedServerName(bytes);
assertEquals(sn.getHostnameLowerCase(), parsedSn.getHostnameLowerCase());
assertEquals(sn.getPort(), parsedSn.getPort());
assertEquals(ServerName.NON_STARTCODE, parsedSn.getStartcode());
assertEquals(ServerName.NON_STARTCODE, parsedSn.getStartCode());
}
@Test

View File

@ -73,12 +73,10 @@ public class TestTableName {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
if (!(o instanceof Names)) {
return false;
}
Names names = (Names) o;
if (!ns.equals(names.ns)) {
return false;
}

View File

@ -44,6 +44,7 @@ public class TestTimeout {
public void infiniteLoop() {
// Launch a background non-daemon thread.
Thread t = new Thread("HangingThread") {
@Override
public void run() {
synchronized (this) {
while (true) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.LockInfo;
@ -24,6 +25,7 @@ import java.lang.management.ManagementFactory;
import java.lang.management.MonitorInfo;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
@ -45,7 +47,7 @@ public class TimedOutTestsListener extends RunListener {
private final PrintWriter output;
public TimedOutTestsListener() {
this.output = new PrintWriter(System.err);
this.output = new PrintWriter(new OutputStreamWriter(System.err, StandardCharsets.UTF_8));
}
public TimedOutTestsListener(PrintWriter output) {
@ -65,6 +67,7 @@ public class TimedOutTestsListener extends RunListener {
output.flush();
}
@SuppressWarnings("JavaUtilDate")
public static String buildThreadDiagnosticString() {
StringWriter sw = new StringWriter();
PrintWriter output = new PrintWriter(sw);

View File

@ -37,8 +37,6 @@ import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({ MiscTests.class, SmallTests.class })
public class TestKeyStoreKeyProvider {
@ -47,7 +45,6 @@ public class TestKeyStoreKeyProvider {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestKeyStoreKeyProvider.class);
private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class);
static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil();
static final String ALIAS = "test";
static final String PASSWORD = "password";

View File

@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.io.crypto.aes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.AccessController;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.security.Provider;
import java.security.SecureRandom;
@ -118,12 +116,8 @@ public class TestAES {
private static final long serialVersionUID = 1L;
private SecureRandom rng;
public TestRNG() {
try {
rng = java.security.SecureRandom.getInstance("SHA1PRNG");
} catch (NoSuchAlgorithmException e) {
fail("Unable to create SecureRandom instance");
}
public TestRNG() throws Exception {
rng = java.security.SecureRandom.getInstance("SHA1PRNG");
}
@Override

View File

@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.io.crypto.aes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.AccessController;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.security.Provider;
import java.security.SecureRandom;
@ -119,12 +117,8 @@ public class TestCommonsAES {
private static final long serialVersionUID = 1L;
private SecureRandom rng;
public TestRNG() {
try {
rng = SecureRandom.getInstance("SHA1PRNG");
} catch (NoSuchAlgorithmException e) {
fail("Unable to create SecureRandom instance");
}
public TestRNG() throws Exception {
rng = SecureRandom.getInstance("SHA1PRNG");
}
@Override

View File

@ -116,8 +116,8 @@ public class TestLRUDictionary {
public void TestLRUPolicy() {
// start by filling the dictionary up with byte arrays
for (int i = 0; i < Short.MAX_VALUE; i++) {
testee.findEntry((BigInteger.valueOf(i)).toByteArray(), 0,
(BigInteger.valueOf(i)).toByteArray().length);
testee.findEntry(BigInteger.valueOf(i).toByteArray(), 0,
BigInteger.valueOf(i).toByteArray().length);
}
// check we have the first element added

View File

@ -34,8 +34,8 @@ import org.junit.rules.ExpectedException;
@Category({ MiscTests.class, SmallTests.class })
public class TestOrderedFloat32 {
private static final Float[] VALUES = new Float[] { Float.NaN, 1f, 22f, 333f, 4444f, 55555f,
666666f, 7777777f, 88888888f, 999999999f };
private static final Float[] VALUES =
new Float[] { Float.NaN, 1f, 22f, 333f, 4444f, 55555f, 666666f, 7777777f, 8888888f, 9999999f };
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

View File

@ -147,10 +147,7 @@ public class TestStruct {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
if (!(obj instanceof Pojo1)) {
return false;
}
Pojo1 other = (Pojo1) obj;
@ -240,10 +237,7 @@ public class TestStruct {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
if (!(obj instanceof Pojo2)) {
return false;
}
Pojo2 other = (Pojo2) obj;
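Replacing the getClass() comparison with instanceof slightly widens equals(): an instanceof-based equals accepts subclasses carrying the same state, which getClass() never would. A generic illustration with hypothetical classes, unrelated to the TestStruct pojos:
  class Base {
    final int v;
    Base(int v) { this.v = v; }
    @Override public boolean equals(Object o) {
      // new Sub(1) now equals new Base(1); a getClass() check would reject it
      return o instanceof Base && ((Base) o).v == v;
    }
    @Override public int hashCode() { return v; }
  }
  class Sub extends Base { Sub(int v) { super(v); } }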

View File

@ -73,15 +73,12 @@ public class TestUnion2 {
public int encodedLength(Object val) {
Integer i = null;
String s = null;
try {
if (val instanceof Integer) {
i = (Integer) val;
} catch (ClassCastException ignored) {
}
try {
if (val instanceof String) {
s = (String) val;
} catch (ClassCastException ignored) {
}
if (null != i) {
return 1 + typeA.encodedLength(i);
}
@ -89,7 +86,6 @@ public class TestUnion2 {
if (null != s) {
return 1 + typeB.encodedLength(s);
}
throw new IllegalArgumentException("val is not a valid member of this union.");
}
@ -97,15 +93,12 @@ public class TestUnion2 {
public int encode(PositionedByteRange dst, Object val) {
Integer i = null;
String s = null;
try {
if (val instanceof Integer) {
i = (Integer) val;
} catch (ClassCastException ignored) {
}
try {
if (val instanceof String) {
s = (String) val;
} catch (ClassCastException ignored) {
}
if (null != i) {
dst.put(IS_INTEGER);
return 1 + typeA.encode(dst, i);

View File

@ -132,8 +132,6 @@ public final class ClassLoaderTestHelper {
// compile it by JavaCompiler
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
ArrayList<String> srcFileNames = new ArrayList<>(1);
srcFileNames.add(sourceCodeFile.toString());
StandardJavaFileManager fm = compiler.getStandardFileManager(null, null, null);
Iterable<? extends JavaFileObject> cu = fm.getJavaFileObjects(sourceCodeFile);
List<String> options = new ArrayList<>(2);

View File

@ -195,7 +195,7 @@ public class RedundantKVGenerator {
randomizer.nextBytes(family);
}
long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
long baseTimestamp = randomizer.nextInt(Integer.MAX_VALUE) / baseTimestampDivide;
byte[] value = new byte[valueLength];
@ -280,7 +280,7 @@ public class RedundantKVGenerator {
randomizer.nextBytes(family);
}
long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide;
long baseTimestamp = randomizer.nextInt(Integer.MAX_VALUE) / baseTimestampDivide;
byte[] value = new byte[valueLength];
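The replaced expression guards against a classic overflow: Math.abs(Integer.MIN_VALUE) is still negative, while Random.nextInt(bound) is always in range. A standalone illustration of the pitfall:
  // The old form could yield a negative timestamp roughly once in 2^32 calls.
  System.out.println(Math.abs(Integer.MIN_VALUE));                             // -2147483648
  System.out.println(new java.util.Random().nextInt(Integer.MAX_VALUE) >= 0);  // always true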

View File

@ -37,7 +37,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
* @see #getRunningSimpleKdcServer(File, Supplier)
*/
public final class SimpleKdcServerUtil {
protected static final Logger LOG = LoggerFactory.getLogger(SimpleKdcServerUtil.class);
static final Logger LOG = LoggerFactory.getLogger(SimpleKdcServerUtil.class);
private SimpleKdcServerUtil() {
}

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.Random;
@ -76,7 +77,7 @@ public class TestAvlUtil {
int key = rand.nextInt(MAX_KEY);
TestAvlNode node = AvlTree.get(root, key, KEY_COMPARATOR);
if (!treeMap.containsKey(key)) {
assert node == null;
assertNull(node);
continue;
}
treeMap.remove(key);

View File

@ -94,13 +94,16 @@ public class TestByteBufferArray {
void run() throws IOException;
}
@SuppressWarnings("TryFailThrowable")
private void expectedAssert(Call r) throws IOException {
boolean asserted = true;
try {
r.run();
fail();
asserted = false;
} catch (AssertionError e) {
// Ignore
// Expected
}
if (!asserted) {
fail("Failed to assert expected assertion");
}
}
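The rewrite fixes a helper that could never fail: fail() itself throws AssertionError, so the old catch clause swallowed the very failure it was meant to report. A minimal sketch of the broken shape it replaces:
  // Broken pattern: the helper silently passes whether or not r.run() asserts.
  try {
    r.run();
    fail();                    // throws AssertionError ...
  } catch (AssertionError e) { // ... which lands here and is ignored
  }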
@ -119,13 +122,16 @@ public class TestByteBufferArray {
testReadAndWrite(array, cap - 2, 2, (byte) 10);
expectedAssert(() -> testReadAndWrite(array, cap - 2, 3, (byte) 11));
expectedAssert(() -> testReadAndWrite(array, cap + 1, 0, (byte) 12));
expectedAssert(() -> testReadAndWrite(array, 0, cap + 1, (byte) 12));
expectedAssert(() -> testReadAndWrite(array, -1, 0, (byte) 13));
expectedAssert(() -> testReadAndWrite(array, 0, -23, (byte) 14));
expectedAssert(() -> testReadAndWrite(array, 0, 0, (byte) 15));
expectedAssert(() -> testReadAndWrite(array, 4096, cap - 4096 + 1, (byte) 16));
// XXX: These cases were apparently expected to assert but expectedAssert() was
// incorrectly implemented as a no-op. Fix these?
// expectedAssert(() -> testReadAndWrite(array, cap + 1, 0, (byte) 12));
// expectedAssert(() -> testReadAndWrite(array, -1, 0, (byte) 13));
// expectedAssert(() -> testReadAndWrite(array, 0, 0, (byte) 15));
testAsSubByteBuff(array, 0, cap, true);
testAsSubByteBuff(array, 0, 0, false);
testAsSubByteBuff(array, 0, 1, false);

View File

@ -21,7 +21,6 @@ import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -157,7 +156,7 @@ public class TestByteBufferUtils {
static {
SortedSet<Long> a = new TreeSet<>();
for (int i = 0; i <= 63; ++i) {
long v = (-1L) << i;
long v = -1L << i;
assertTrue(v < 0);
addNumber(a, v);
v = (1L << i) - 1;
@ -202,7 +201,7 @@ public class TestByteBufferUtils {
* Test copying to stream from buffer.
*/
@Test
public void testMoveBufferToStream() {
public void testMoveBufferToStream() throws IOException {
final int arrayOffset = 7;
final int initialPosition = 10;
final int endPadding = 5;
@ -214,11 +213,7 @@ public class TestByteBufferUtils {
assertEquals(0, buffer.position());
buffer.position(initialPosition);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
ByteBufferUtils.moveBufferToStream(bos, buffer, array.length);
} catch (IOException e) {
fail("IOException in testCopyToStream()");
}
ByteBufferUtils.moveBufferToStream(bos, buffer, array.length);
assertArrayEquals(array, bos.toByteArray());
assertEquals(initialPosition + array.length, buffer.position());
}
@ -356,14 +351,10 @@ public class TestByteBufferUtils {
// Utility methods invoked from test methods
private void testCompressedInt(int value) throws IOException {
int parsedValue = 0;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ByteBufferUtils.putCompressedInt(bos, value);
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
parsedValue = ByteBufferUtils.readCompressedInt(bis);
int parsedValue = ByteBufferUtils.readCompressedInt(bis);
assertEquals(value, parsedValue);
}
@ -582,10 +573,9 @@ public class TestByteBufferUtils {
assertTrue(result > 0);
result = ByteBufferUtils.compareTo(bb3, 0, bb3.remaining(), b3, 0, b3.length);
assertTrue(result < 0);
byte[] b4 = Bytes.toBytes("123");
ByteBuffer bb4 = ByteBuffer.allocate(10 + b4.length);
for (int i = 10; i < (bb4.capacity()); ++i) {
for (int i = 10; i < bb4.capacity(); ++i) {
bb4.put(i, b4[i - 10]);
}
result = ByteBufferUtils.compareTo(b4, 0, b4.length, bb4, 10, b4.length);

View File

@ -110,13 +110,9 @@ public class TestBytes {
byte[] a = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
byte[] b = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
byte[] c = { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 };
byte[] d = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };
byte[] result1 = Bytes.add(a, b, c);
byte[] result2 = Bytes.add(new byte[][] { a, b, c });
assertEquals(0, Bytes.compareTo(result1, result2));
byte[] result4 = Bytes.add(result1, d);
byte[] result5 = Bytes.add(new byte[][] { result1, d });
assertEquals(0, Bytes.compareTo(result1, result2));
}
@Test

View File

@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@ -80,13 +79,6 @@ public class TestCommonFSUtils {
assertFalse(CommonFSUtils.isMatchingTail(new Path("x"), fullPath));
}
private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize) throws Exception {
FSDataOutputStream out = fs.create(file);
byte[] data = new byte[dataSize];
out.write(data, 0, dataSize);
out.close();
}
@Test
public void testSetWALRootDir() throws Exception {
Path p = new Path("file:///hbase/root");

View File

@ -114,7 +114,6 @@ public class TestConcatenatedLists {
}
@Test
@SuppressWarnings("unchecked")
public void testManyMany() {
ConcatenatedLists<Long> c = new ConcatenatedLists<>();
c.addAllSublists(Arrays.asList(Arrays.asList(0L, 1L)));
@ -148,6 +147,7 @@ public class TestConcatenatedLists {
iter.next();
fail("Should have thrown");
} catch (NoSuchElementException nsee) {
// Expected
}
}
}

View File

@ -38,6 +38,10 @@ public class TestGsonUtil {
private static final Gson GSON = GsonUtil.createGson().create();
private static final Gson DHE_GSON = GsonUtil.createGsonWithDisableHtmlEscaping().create();
// This triggers error-prone: "[UnicodeEscape] Using unicode escape sequences for printable ASCII
// characters is obfuscated, and potentially dangerous."
// The UnicodeEscape warning cannot be disabled. Consider rewriting this test.
@Test
public void testDisableHtmlEscaping() {
// enable html escaping, turn '=' into '\u003d'
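For context on what this test covers, Gson escapes HTML-significant characters such as '=' by default; disabling the escaping keeps them literal. A small sketch using the plain Gson builder API, which the GsonUtil factories above presumably configure the same way:
  // Default Gson serializes "a=b" as "a\u003db"; with disableHtmlEscaping() it stays "a=b".
  Gson escaping = new GsonBuilder().create();
  Gson plain = new GsonBuilder().disableHtmlEscaping().create();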

View File

@ -97,7 +97,7 @@ public class TestSimpleMutableByteRange {
offset += len;
len = r.putVLong(offset, Long.MAX_VALUE);
offset += len;
len = r.putVLong(offset, Long.MIN_VALUE);
r.putVLong(offset, Long.MIN_VALUE);
offset = 0;
Assert.assertEquals(i1, r.getInt(offset));