HBASE-26523 + HBASE-25465 + HBASE-26855 backport to branch-2.4 (#4439)

* HBASE-26523 Upgrade hbase-thirdparty dependency to 4.0.1 (#3988)

Signed-off-by: GeorryHuang <huangzhuoyue@apache.org>

* HBASE-25465 Use javac --release option for supporting cross version compilation (#4164)

Signed-off-by: Andrew Purtell <apurtell@apache.org>

* HBASE-26855 Delete unnecessary dependency on jaxb-runtime jar (#4236)

Signed-off-by: Duo Zhang <zhangduo@apache.org>

* spotless apply

Co-authored-by: Duo Zhang <zhangduo@apache.org>
Co-authored-by: Nick Dimiduk <ndimiduk@apache.org>

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Viraj Jasani 2022-05-18 16:28:55 -07:00 committed by GitHub
parent ee284b6107
commit 91a44f5bac
51 changed files with 471 additions and 573 deletions
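
For context on HBASE-25465: javac's --release flag compiles against the target JDK's public API, catching accidental use of newer-JDK methods that -source/-target alone would miss. A minimal maven-compiler-plugin sketch of the idea (illustrative only, not the exact branch-2.4 pom change):

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-compiler-plugin</artifactId>
  <configuration>
    <!-- Compile against the JDK 8 API even when building on a newer JDK;
         -source/-target only control the emitted bytecode level. -->
    <release>8</release>
  </configuration>
</plugin>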


@@ -27,9 +27,9 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -58,7 +58,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesP
*/
@InterfaceAudience.Public
public class FuzzyRowFilter extends FilterBase {
private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
private static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
// the wildcard byte is 1 on the user side. but the filter converts it internally
// in preprocessMask. This was changed in HBASE-15676 due to a bug with using 0.


@@ -60,6 +60,10 @@
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-netty</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-unsafe</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>


@@ -29,10 +29,10 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.UnsafeAccess;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.nio.ch.DirectBuffer;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
@@ -362,11 +362,8 @@ public class ByteBuffAllocator {
public void clean() {
while (!buffers.isEmpty()) {
ByteBuffer b = buffers.poll();
if (b instanceof DirectBuffer) {
DirectBuffer db = (DirectBuffer) b;
if (db.cleaner() != null) {
db.cleaner().clean();
}
if (b.isDirect()) {
UnsafeAccess.freeDirectBuffer(b);
}
}
this.usedBufCount.set(0);
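
The hunk above drops the cast to sun.nio.ch.DirectBuffer (a JDK-internal type that newer JDKs hide) in favor of UnsafeAccess.freeDirectBuffer, which, as the UnsafeAccess diff later in this commit shows, delegates to the shaded Netty PlatformDependent. A minimal sketch of the resulting freeing pattern:

import java.nio.ByteBuffer;
import org.apache.hbase.thirdparty.io.netty.util.internal.PlatformDependent;

public final class DirectBufferFreeDemo {
  // Releases a direct buffer's native memory eagerly instead of waiting for GC,
  // without referencing JDK-internal sun.nio.ch types.
  static void free(ByteBuffer b) {
    if (b.isDirect()) {
      PlatformDependent.freeDirectBuffer(b); // Netty handles the Cleaner details
    }
  }

  public static void main(String[] args) {
    free(ByteBuffer.allocateDirect(1024));
  }
}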


@@ -24,12 +24,11 @@ import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import org.apache.hadoop.hbase.io.ByteBuffAllocator.Recycler;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.ObjectIntPair;
import org.apache.hadoop.hbase.util.UnsafeAccess;
import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
import org.apache.yetus.audience.InterfaceAudience;
import sun.nio.ch.DirectBuffer;
/**
* An implementation of ByteBuff where a single BB backs the BBI. This just acts as a wrapper over a
@@ -38,8 +37,8 @@ import sun.nio.ch.DirectBuffer;
@InterfaceAudience.Private
public class SingleByteBuff extends ByteBuff {
private static final boolean UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
private static final boolean UNSAFE_AVAIL = HBasePlatformDependent.isUnsafeAvailable();
private static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
// Underlying BB
private final ByteBuffer buf;
@@ -63,7 +62,7 @@ public class SingleByteBuff extends ByteBuff {
this.unsafeOffset = UnsafeAccess.BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset();
this.unsafeRef = buf.array();
} else {
this.unsafeOffset = ((DirectBuffer) buf).address();
this.unsafeOffset = UnsafeAccess.directBufferAddress(buf);
}
}


@@ -31,17 +31,16 @@ import java.util.Arrays;
import org.apache.hadoop.hbase.io.ByteBufferWriter;
import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
import sun.nio.ch.DirectBuffer;
/**
* Utility functions for working with byte buffers, such as reading/writing variable-length long
* numbers.
* @deprecated This class will become IA.Private in HBase 3.0. Downstream folks shouldn't use it.
*/
@SuppressWarnings("restriction")
@Deprecated
@InterfaceAudience.Public
public final class ByteBufferUtils {
@@ -50,8 +49,8 @@ public final class ByteBufferUtils {
public final static int NEXT_BIT_SHIFT = 7;
public final static int NEXT_BIT_MASK = 1 << 7;
@InterfaceAudience.Private
final static boolean UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
public final static boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
final static boolean UNSAFE_AVAIL = HBasePlatformDependent.isUnsafeAvailable();
public final static boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
private ByteBufferUtils() {
}
@@ -91,11 +90,10 @@ public final class ByteBufferUtils {
static Comparer getBestComparer() {
try {
Class<?> theClass = Class.forName(UNSAFE_COMPARER_NAME);
Class<? extends Comparer> theClass =
Class.forName(UNSAFE_COMPARER_NAME).asSubclass(Comparer.class);
@SuppressWarnings("unchecked")
Comparer comparer = (Comparer) theClass.getConstructor().newInstance();
return comparer;
return theClass.getConstructor().newInstance();
} catch (Throwable t) { // ensure we really catch *everything*
return PureJavaComparer.INSTANCE;
}
@@ -152,7 +150,7 @@
long offset2Adj;
Object refObj2 = null;
if (buf2.isDirect()) {
offset2Adj = o2 + ((DirectBuffer) buf2).address();
offset2Adj = o2 + UnsafeAccess.directBufferAddress(buf2);
} else {
offset2Adj = o2 + buf2.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
refObj2 = buf2.array();
@@ -166,13 +164,13 @@
long offset1Adj, offset2Adj;
Object refObj1 = null, refObj2 = null;
if (buf1.isDirect()) {
offset1Adj = o1 + ((DirectBuffer) buf1).address();
offset1Adj = o1 + UnsafeAccess.directBufferAddress(buf1);
} else {
offset1Adj = o1 + buf1.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
refObj1 = buf1.array();
}
if (buf2.isDirect()) {
offset2Adj = o2 + ((DirectBuffer) buf2).address();
offset2Adj = o2 + UnsafeAccess.directBufferAddress(buf2);
} else {
offset2Adj = o2 + buf2.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
refObj2 = buf2.array();
@@ -189,12 +187,11 @@
static Converter getBestConverter() {
try {
Class<?> theClass = Class.forName(UNSAFE_CONVERTER_NAME);
Class<? extends Converter> theClass =
Class.forName(UNSAFE_CONVERTER_NAME).asSubclass(Converter.class);
// yes, UnsafeComparer does implement Comparer<byte[]>
@SuppressWarnings("unchecked")
Converter converter = (Converter) theClass.getConstructor().newInstance();
return converter;
return theClass.getConstructor().newInstance();
} catch (Throwable t) { // ensure we really catch *everything*
return PureJavaConverter.INSTANCE;
}
@@ -932,8 +929,8 @@
* 64-bit.
*/
for (i = 0; i < strideLimit; i += stride) {
long lw = UnsafeAccess.theUnsafe.getLong(obj1, o1 + (long) i);
long rw = UnsafeAccess.theUnsafe.getLong(obj2, o2 + (long) i);
long lw = HBasePlatformDependent.getLong(obj1, o1 + (long) i);
long rw = HBasePlatformDependent.getLong(obj2, o2 + (long) i);
if (lw != rw) {
if (!UnsafeAccess.LITTLE_ENDIAN) {
return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
@@ -953,8 +950,8 @@
// The epilogue to cover the last (minLength % stride) elements.
for (; i < minLength; i++) {
int il = (UnsafeAccess.theUnsafe.getByte(obj1, o1 + i) & 0xFF);
int ir = (UnsafeAccess.theUnsafe.getByte(obj2, o2 + i) & 0xFF);
int il = (HBasePlatformDependent.getByte(obj1, o1 + i) & 0xFF);
int ir = (HBasePlatformDependent.getByte(obj2, o2 + i) & 0xFF);
if (il != ir) {
return il - ir;
}


@@ -42,13 +42,13 @@ import java.util.Random;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.misc.Unsafe;
import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
@@ -56,7 +56,6 @@ import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUti
* Utility class that handles byte arrays, conversions to/from other types, comparisons, hash code
* generation, manufacturing keys for HashMaps or HashSets, and can be used as key in maps or trees.
*/
@SuppressWarnings("restriction")
@InterfaceAudience.Public
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
@@ -127,7 +126,7 @@ public class Bytes implements Comparable<Bytes> {
public static final int ESTIMATED_HEAP_TAX = 16;
@InterfaceAudience.Private
static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
/**
* Returns length of the byte array, returning 0 if the array is null. Useful for calculating
@@ -1428,22 +1427,18 @@
protected static final class UnsafeConverter extends Converter {
static final Unsafe theUnsafe;
public UnsafeConverter() {
}
static {
if (UNSAFE_UNALIGNED) {
theUnsafe = UnsafeAccess.theUnsafe;
} else {
if (!UNSAFE_UNALIGNED) {
// It doesn't matter what we throw;
// it's swallowed in getBestComparer().
throw new Error();
}
// sanity check - this should never fail
if (theUnsafe.arrayIndexScale(byte[].class) != 1) {
if (HBasePlatformDependent.arrayIndexScale(byte[].class) != 1) {
throw new AssertionError();
}
}
@@ -1482,7 +1477,7 @@
/**
* Provides a lexicographical comparer implementation; either a Java implementation or a faster
* implementation based on {@link Unsafe}.
* implementation based on {@code Unsafe}.
* <p>
* Uses reflection to gracefully fall back to the Java implementation if {@code Unsafe} isn't
* available.
@@ -1539,18 +1534,15 @@
enum UnsafeComparer implements Comparer<byte[]> {
INSTANCE;
static final Unsafe theUnsafe;
static {
if (UNSAFE_UNALIGNED) {
theUnsafe = UnsafeAccess.theUnsafe;
} else {
if (!UNSAFE_UNALIGNED) {
// It doesn't matter what we throw;
// it's swallowed in getBestComparer().
throw new Error();
}
// sanity check - this should never fail
if (theUnsafe.arrayIndexScale(byte[].class) != 1) {
if (HBasePlatformDependent.arrayIndexScale(byte[].class) != 1) {
throw new AssertionError();
}
}
@@ -1585,8 +1577,8 @@
* than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit.
*/
for (i = 0; i < strideLimit; i += stride) {
long lw = theUnsafe.getLong(buffer1, offset1Adj + i);
long rw = theUnsafe.getLong(buffer2, offset2Adj + i);
long lw = HBasePlatformDependent.getLong(buffer1, offset1Adj + i);
long rw = HBasePlatformDependent.getLong(buffer2, offset2Adj + i);
if (lw != rw) {
if (!UnsafeAccess.LITTLE_ENDIAN) {
return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
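
An aside on the stride loop in the excerpt above: it compares eight bytes per iteration and, on big-endian hosts, uses the Long.MIN_VALUE bias to turn Java's signed long comparison into the unsigned comparison that lexicographic byte order requires (Java 8's Long.compareUnsigned does the same thing). A self-contained sketch of the trick:

public final class UnsignedCompareDemo {
  // Adding Long.MIN_VALUE to both operands flips their sign bits, so a signed
  // comparison of the biased values orders them as unsigned 64-bit words.
  // Like the loop above, this is only called once the words are known to differ.
  static int compareUnsigned(long lw, long rw) {
    return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
  }

  public static void main(String[] args) {
    // 0xFFFF...FF (-1 as a signed long) must sort after 1 in unsigned order.
    System.out.println(compareUnsigned(-1L, 1L)); // prints 1
  }
}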


@@ -21,6 +21,7 @@ import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -178,13 +179,19 @@ public class ClassSize {
private byte a;
}
private static final int ARRAY_OBJECT_INDEX_SCALE =
HBasePlatformDependent.arrayIndexScale(Object[].class);
private static final int ARRAY_BYTE_INDEX_SCALE =
HBasePlatformDependent.arrayIndexScale(byte[].class);
public UnsafeLayout() {
}
@Override
int headerSize() {
try {
return (int) UnsafeAccess.theUnsafe
return (int) HBasePlatformDependent
.objectFieldOffset(HeaderSize.class.getDeclaredField("a"));
} catch (NoSuchFieldException | SecurityException e) {
LOG.error(e.toString(), e);
@@ -194,21 +201,19 @@
@Override
int arrayHeaderSize() {
return UnsafeAccess.theUnsafe.arrayBaseOffset(byte[].class);
return HBasePlatformDependent.arrayBaseOffset(byte[].class);
}
@Override
@SuppressWarnings("static-access")
int oopSize() {
// Unsafe.addressSize() returns 8, even with CompressedOops. This is how many bytes each
// element is allocated in an Object[].
return UnsafeAccess.theUnsafe.ARRAY_OBJECT_INDEX_SCALE;
return ARRAY_OBJECT_INDEX_SCALE;
}
@Override
@SuppressWarnings("static-access")
long sizeOfByteArray(int len) {
return align(ARRAY + len * UnsafeAccess.theUnsafe.ARRAY_BYTE_INDEX_SCALE);
return align(ARRAY + len * ARRAY_BYTE_INDEX_SCALE);
}
}
@@ -216,7 +221,10 @@
// Have a safeguard in case Unsafe estimate is wrong. This is static context, there is
// no configuration, so we look at System property.
String enabled = System.getProperty("hbase.memorylayout.use.unsafe");
if (UnsafeAvailChecker.isAvailable() && (enabled == null || Boolean.parseBoolean(enabled))) {
if (
HBasePlatformDependent.isUnsafeAvailable()
&& (enabled == null || Boolean.parseBoolean(enabled))
) {
LOG.debug("Using Unsafe to estimate memory layout");
return new UnsafeLayout();
}


@@ -17,26 +17,18 @@
*/
package org.apache.hadoop.hbase.util;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.misc.Unsafe;
import sun.nio.ch.DirectBuffer;
import org.apache.hbase.thirdparty.io.netty.util.internal.PlatformDependent;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class UnsafeAccess {
private static final Logger LOG = LoggerFactory.getLogger(UnsafeAccess.class);
public static final Unsafe theUnsafe;
/** The offset to the first element in a byte array. */
public static final long BYTE_ARRAY_BASE_OFFSET;
@@ -48,22 +40,8 @@ public final class UnsafeAccess {
// during a large copy
static final long UNSAFE_COPY_THRESHOLD = 1024L * 1024L;
static {
theUnsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
try {
Field f = Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
return f.get(null);
} catch (Throwable e) {
LOG.warn("sun.misc.Unsafe is not accessible", e);
}
return null;
}
});
if (theUnsafe != null) {
BYTE_ARRAY_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
if (HBasePlatformDependent.isUnsafeAvailable()) {
BYTE_ARRAY_BASE_OFFSET = HBasePlatformDependent.arrayBaseOffset(byte[].class);
} else {
BYTE_ARRAY_BASE_OFFSET = -1;
}
@@ -81,9 +59,10 @@
*/
public static short toShort(byte[] bytes, int offset) {
if (LITTLE_ENDIAN) {
return Short.reverseBytes(theUnsafe.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
return Short
.reverseBytes(HBasePlatformDependent.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
} else {
return theUnsafe.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
return HBasePlatformDependent.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
}
}
@@ -95,9 +74,10 @@
*/
public static int toInt(byte[] bytes, int offset) {
if (LITTLE_ENDIAN) {
return Integer.reverseBytes(theUnsafe.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
return Integer
.reverseBytes(HBasePlatformDependent.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
} else {
return theUnsafe.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
return HBasePlatformDependent.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
}
}
@@ -109,9 +89,10 @@
*/
public static long toLong(byte[] bytes, int offset) {
if (LITTLE_ENDIAN) {
return Long.reverseBytes(theUnsafe.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
return Long
.reverseBytes(HBasePlatformDependent.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
} else {
return theUnsafe.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
return HBasePlatformDependent.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
}
}
@@ -127,7 +108,7 @@
if (LITTLE_ENDIAN) {
val = Short.reverseBytes(val);
}
theUnsafe.putShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
HBasePlatformDependent.putShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
return offset + Bytes.SIZEOF_SHORT;
}
@@ -142,7 +123,7 @@
if (LITTLE_ENDIAN) {
val = Integer.reverseBytes(val);
}
theUnsafe.putInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
HBasePlatformDependent.putInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
return offset + Bytes.SIZEOF_INT;
}
@@ -157,14 +138,15 @@
if (LITTLE_ENDIAN) {
val = Long.reverseBytes(val);
}
theUnsafe.putLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
HBasePlatformDependent.putLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
return offset + Bytes.SIZEOF_LONG;
}
// APIs to read primitive data from a ByteBuffer using Unsafe way
/**
* Reads a short value at the given buffer's offset considering it was written in big-endian
* format. nn * @return short value at offset
* format.
* @return short value at offset
*/
public static short toShort(ByteBuffer buf, int offset) {
if (LITTLE_ENDIAN) {
@@ -175,28 +157,32 @@
/**
* Reads a short value at the given Object's offset considering it was written in big-endian
* format. nn * @return short value at offset
* format.
* @return short value at offset
*/
public static short toShort(Object ref, long offset) {
if (LITTLE_ENDIAN) {
return Short.reverseBytes(theUnsafe.getShort(ref, offset));
return Short.reverseBytes(HBasePlatformDependent.getShort(ref, offset));
}
return theUnsafe.getShort(ref, offset);
return HBasePlatformDependent.getShort(ref, offset);
}
/**
* Reads bytes at the given offset as a short value. nn * @return short value at offset
* Reads bytes at the given offset as a short value.
* @return short value at offset
*/
static short getAsShort(ByteBuffer buf, int offset) {
private static short getAsShort(ByteBuffer buf, int offset) {
if (buf.isDirect()) {
return theUnsafe.getShort(((DirectBuffer) buf).address() + offset);
return HBasePlatformDependent.getShort(directBufferAddress(buf) + offset);
}
return theUnsafe.getShort(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
return HBasePlatformDependent.getShort(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
}
/**
* Reads an int value at the given buffer's offset considering it was written in big-endian
* format. nn * @return int value at offset
* format.
* @return int value at offset
*/
public static int toInt(ByteBuffer buf, int offset) {
if (LITTLE_ENDIAN) {
@@ -211,24 +197,27 @@
*/
public static int toInt(Object ref, long offset) {
if (LITTLE_ENDIAN) {
return Integer.reverseBytes(theUnsafe.getInt(ref, offset));
return Integer.reverseBytes(HBasePlatformDependent.getInt(ref, offset));
}
return theUnsafe.getInt(ref, offset);
return HBasePlatformDependent.getInt(ref, offset);
}
/**
* Reads bytes at the given offset as an int value. nn * @return int value at offset
* Reads bytes at the given offset as an int value.
* @return int value at offset
*/
static int getAsInt(ByteBuffer buf, int offset) {
private static int getAsInt(ByteBuffer buf, int offset) {
if (buf.isDirect()) {
return theUnsafe.getInt(((DirectBuffer) buf).address() + offset);
return HBasePlatformDependent.getInt(directBufferAddress(buf) + offset);
}
return theUnsafe.getInt(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
return HBasePlatformDependent.getInt(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
}
/**
* Reads a long value at the given buffer's offset considering it was written in big-endian
* format. nn * @return long value at offset
* format.
* @return long value at offset
*/
public static long toLong(ByteBuffer buf, int offset) {
if (LITTLE_ENDIAN) {
@@ -239,23 +228,49 @@
/**
* Reads a long value at the given Object's offset considering it was written in big-endian
* format. nn * @return long value at offset
* format.
* @return long value at offset
*/
public static long toLong(Object ref, long offset) {
if (LITTLE_ENDIAN) {
return Long.reverseBytes(theUnsafe.getLong(ref, offset));
return Long.reverseBytes(HBasePlatformDependent.getLong(ref, offset));
}
return theUnsafe.getLong(ref, offset);
return HBasePlatformDependent.getLong(ref, offset);
}
/**
* Reads bytes at the given offset as a long value. nn * @return long value at offset
* Reads bytes at the given offset as a long value.
* @return long value at offset
*/
static long getAsLong(ByteBuffer buf, int offset) {
private static long getAsLong(ByteBuffer buf, int offset) {
if (buf.isDirect()) {
return theUnsafe.getLong(((DirectBuffer) buf).address() + offset);
return HBasePlatformDependent.getLong(directBufferAddress(buf) + offset);
}
return theUnsafe.getLong(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
return HBasePlatformDependent.getLong(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
}
/**
* Returns the byte at the given offset
* @param buf the buffer to read
* @param offset the offset at which the byte has to be read
* @return the byte at the given offset
*/
public static byte toByte(ByteBuffer buf, int offset) {
if (buf.isDirect()) {
return HBasePlatformDependent.getByte(directBufferAddress(buf) + offset);
} else {
return HBasePlatformDependent.getByte(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
}
}
/**
* Returns the byte at the given offset of the object
* @return the byte at the given offset
*/
public static byte toByte(Object ref, long offset) {
return HBasePlatformDependent.getByte(ref, offset);
}
/**
@@ -270,9 +285,10 @@
val = Integer.reverseBytes(val);
}
if (buf.isDirect()) {
theUnsafe.putInt(((DirectBuffer) buf).address() + offset, val);
HBasePlatformDependent.putInt(directBufferAddress(buf) + offset, val);
} else {
theUnsafe.putInt(buf.array(), offset + buf.arrayOffset() + BYTE_ARRAY_BASE_OFFSET, val);
HBasePlatformDependent.putInt(buf.array(),
offset + buf.arrayOffset() + BYTE_ARRAY_BASE_OFFSET, val);
}
return offset + Bytes.SIZEOF_INT;
}
@@ -285,7 +301,7 @@
long destAddress = destOffset;
Object destBase = null;
if (dest.isDirect()) {
destAddress = destAddress + ((DirectBuffer) dest).address();
destAddress = destAddress + directBufferAddress(dest);
} else {
destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
@@ -297,7 +313,7 @@
private static void unsafeCopy(Object src, long srcAddr, Object dst, long destAddr, long len) {
while (len > 0) {
long size = (len > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : len;
theUnsafe.copyMemory(src, srcAddr, dst, destAddr, size);
HBasePlatformDependent.copyMemory(src, srcAddr, dst, destAddr, size);
len -= size;
srcAddr += size;
destAddr += size;
@@ -306,13 +322,18 @@
/**
* Copies specified number of bytes from given offset of {@code src} ByteBuffer to the
* {@code dest} array. nnnnn
* {@code dest} array.
* @param src source buffer
* @param srcOffset offset into source buffer
* @param dest destination array
* @param destOffset offset into destination buffer
* @param length length of data to copy
*/
public static void copy(ByteBuffer src, int srcOffset, byte[] dest, int destOffset, int length) {
long srcAddress = srcOffset;
Object srcBase = null;
if (src.isDirect()) {
srcAddress = srcAddress + ((DirectBuffer) src).address();
srcAddress = srcAddress + directBufferAddress(src);
} else {
srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
srcBase = src.array();
@@ -323,20 +344,25 @@
/**
* Copies specified number of bytes from given offset of {@code src} buffer into the {@code dest}
* buffer. nnnnn
* buffer.
* @param src source buffer
* @param srcOffset offset into source buffer
* @param dest destination buffer
* @param destOffset offset into destination buffer
* @param length length of data to copy
*/
public static void copy(ByteBuffer src, int srcOffset, ByteBuffer dest, int destOffset,
int length) {
long srcAddress, destAddress;
Object srcBase = null, destBase = null;
if (src.isDirect()) {
srcAddress = srcOffset + ((DirectBuffer) src).address();
srcAddress = srcOffset + directBufferAddress(src);
} else {
srcAddress = (long) srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
srcBase = src.array();
}
if (dest.isDirect()) {
destAddress = destOffset + ((DirectBuffer) dest).address();
destAddress = destOffset + directBufferAddress(dest);
} else {
destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
@@ -357,9 +383,10 @@
val = Short.reverseBytes(val);
}
if (buf.isDirect()) {
theUnsafe.putShort(((DirectBuffer) buf).address() + offset, val);
HBasePlatformDependent.putShort(directBufferAddress(buf) + offset, val);
} else {
theUnsafe.putShort(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
HBasePlatformDependent.putShort(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
}
return offset + Bytes.SIZEOF_SHORT;
}
@@ -376,9 +403,10 @@
val = Long.reverseBytes(val);
}
if (buf.isDirect()) {
theUnsafe.putLong(((DirectBuffer) buf).address() + offset, val);
HBasePlatformDependent.putLong(directBufferAddress(buf) + offset, val);
} else {
theUnsafe.putLong(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
HBasePlatformDependent.putLong(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
}
return offset + Bytes.SIZEOF_LONG;
}
@@ -392,31 +420,20 @@
*/
public static int putByte(ByteBuffer buf, int offset, byte b) {
if (buf.isDirect()) {
theUnsafe.putByte(((DirectBuffer) buf).address() + offset, b);
HBasePlatformDependent.putByte(directBufferAddress(buf) + offset, b);
} else {
theUnsafe.putByte(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, b);
HBasePlatformDependent.putByte(buf.array(),
BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, b);
}
return offset + 1;
}
/**
* Returns the byte at the given offset
* @param buf the buffer to read
* @param offset the offset at which the byte has to be read
* @return the byte at the given offset
*/
public static byte toByte(ByteBuffer buf, int offset) {
if (buf.isDirect()) {
return theUnsafe.getByte(((DirectBuffer) buf).address() + offset);
} else {
return theUnsafe.getByte(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
}
public static long directBufferAddress(ByteBuffer buf) {
return PlatformDependent.directBufferAddress(buf);
}
/**
* Returns the byte at the given offset of the object nn * @return the byte at the given offset
*/
public static byte toByte(Object ref, long offset) {
return theUnsafe.getByte(ref, offset);
public static void freeDirectBuffer(ByteBuffer buffer) {
// here we just use the method in netty
PlatformDependent.freeDirectBuffer(buffer);
}
}
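
Throughout the UnsafeAccess diff above, values are read in native byte order and byte-swapped on little-endian hosts so callers always see big-endian, which is HBase's serialized form. A minimal sketch of that normalization pattern, independent of Unsafe:

import java.nio.ByteOrder;

public final class EndianDemo {
  static final boolean LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;

  // Mirrors the toInt()/putInt() pattern above: swap only when the host is
  // little-endian, so the on-disk/wire representation stays big-endian.
  static int toBigEndian(int nativeOrderValue) {
    return LITTLE_ENDIAN ? Integer.reverseBytes(nativeOrderValue) : nativeOrderValue;
  }

  public static void main(String[] args) {
    System.out.println(Integer.toHexString(toBigEndian(0x12345678)));
  }
}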


@@ -1,189 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class UnsafeAvailChecker {
private static final String CLASS_NAME = "sun.misc.Unsafe";
private static final Logger LOG = LoggerFactory.getLogger(UnsafeAvailChecker.class);
private static boolean avail = false;
private static boolean unaligned = false;
static {
avail = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
@Override
public Boolean run() {
try {
Class<?> clazz = Class.forName(CLASS_NAME);
Field f = clazz.getDeclaredField("theUnsafe");
f.setAccessible(true);
Object theUnsafe = f.get(null);
if (theUnsafe == null) {
LOG.warn("Could not get static instance from sun.misc.Unsafe");
return false;
}
// Check for availability of all methods used by UnsafeAccess
Method m;
try {
m = clazz.getDeclaredMethod("arrayBaseOffset", Class.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing arrayBaseOffset(Class)");
return false;
}
m = clazz.getDeclaredMethod("copyMemory", Object.class, long.class, Object.class,
long.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing copyMemory(Object,long,Object,long,long)");
return false;
}
m = clazz.getDeclaredMethod("getByte", Object.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getByte(Object,long)");
return false;
}
m = clazz.getDeclaredMethod("getShort", long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getShort(long)");
return false;
}
m = clazz.getDeclaredMethod("getShort", Object.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getShort(Object,long)");
return false;
}
m = clazz.getDeclaredMethod("getInt", long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getInt(long)");
return false;
}
m = clazz.getDeclaredMethod("getInt", Object.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getInt(Object,long)");
return false;
}
m = clazz.getDeclaredMethod("getLong", long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getLong(long)");
return false;
}
m = clazz.getDeclaredMethod("getLong", Object.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing getLong(Object,long)");
return false;
}
m = clazz.getDeclaredMethod("putByte", long.class, byte.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putByte(long,byte)");
return false;
}
m = clazz.getDeclaredMethod("putByte", Object.class, long.class, byte.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putByte(Object,long,byte)");
return false;
}
m = clazz.getDeclaredMethod("putShort", long.class, short.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putShort(long,short)");
return false;
}
m = clazz.getDeclaredMethod("putShort", Object.class, long.class, short.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putShort(Object,long,short)");
return false;
}
m = clazz.getDeclaredMethod("putInt", long.class, int.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putInt(long,int)");
return false;
}
m = clazz.getDeclaredMethod("putInt", Object.class, long.class, int.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putInt(Object,long,int)");
return false;
}
m = clazz.getDeclaredMethod("putLong", long.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putLong(long,long)");
return false;
}
m = clazz.getDeclaredMethod("putLong", Object.class, long.class, long.class);
if (m == null) {
LOG.warn("sun.misc.Unsafe is missing putLong(Object,long,long)");
return false;
}
// theUnsafe is accessible and all methods are available
return true;
} catch (Throwable e) {
LOG.warn("sun.misc.Unsafe is missing one or more required methods", e);
}
} catch (Throwable e) {
LOG.warn("sun.misc.Unsafe is not available/accessible", e);
}
return false;
}
});
// When Unsafe itself is not available/accessible consider unaligned as false.
if (avail) {
String arch = System.getProperty("os.arch");
if ("ppc64".equals(arch) || "ppc64le".equals(arch) || "aarch64".equals(arch)) {
// java.nio.Bits.unaligned() wrongly returns false on ppc (JDK-8165231),
unaligned = true;
} else {
try {
// Using java.nio.Bits#unaligned() to check for unaligned-access capability
Class<?> clazz = Class.forName("java.nio.Bits");
Method m = clazz.getDeclaredMethod("unaligned");
m.setAccessible(true);
unaligned = (Boolean) m.invoke(null);
} catch (Exception e) {
LOG.warn("java.nio.Bits#unaligned() check failed."
+ "Unsafe based read/write of primitive types won't be used", e);
}
}
}
}
/**
* @return true when running JVM is having sun's Unsafe package available in it and it is
* accessible.
*/
public static boolean isAvailable() {
return avail;
}
/**
* @return true when running JVM is having sun's Unsafe package available in it and underlying
* system having unaligned-access capability.
*/
public static boolean unaligned() {
return unaligned;
}
private UnsafeAvailChecker() {
// private constructor to avoid instantiation
}
}
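
The deleted class above probed sun.misc.Unsafe and java.nio.Bits reflectively at class-load time; those checks now live behind HBasePlatformDependent in the new hbase-unsafe module from hbase-thirdparty. A usage sketch of the replacement API, using only methods that appear elsewhere in this patch:

import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;

public final class PlatformCheckDemo {
  public static void main(String[] args) {
    // Same answers UnsafeAvailChecker.isAvailable()/unaligned() used to give,
    // without this module doing its own reflection over JDK internals.
    System.out.println("unsafe available: " + HBasePlatformDependent.isUnsafeAvailable());
    System.out.println("unaligned access: " + HBasePlatformDependent.unaligned());
  }
}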


@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.io.WritableUtils;
import org.junit.AfterClass;
import org.junit.Before;
@@ -106,14 +107,14 @@ public class TestByteBufferUtils {
}
static void detectAvailabilityOfUnsafe() throws Exception {
if (ByteBufferUtils.UNSAFE_AVAIL != UnsafeAvailChecker.isAvailable()) {
setUnsafe(UNSAFE_AVAIL_NAME, UnsafeAvailChecker.isAvailable());
if (ByteBufferUtils.UNSAFE_AVAIL != HBasePlatformDependent.isUnsafeAvailable()) {
setUnsafe(UNSAFE_AVAIL_NAME, HBasePlatformDependent.isUnsafeAvailable());
}
if (ByteBufferUtils.UNSAFE_UNALIGNED != UnsafeAvailChecker.unaligned()) {
setUnsafe(UNSAFE_UNALIGNED_NAME, UnsafeAvailChecker.unaligned());
if (ByteBufferUtils.UNSAFE_UNALIGNED != HBasePlatformDependent.unaligned()) {
setUnsafe(UNSAFE_UNALIGNED_NAME, HBasePlatformDependent.unaligned());
}
assertEquals(ByteBufferUtils.UNSAFE_AVAIL, UnsafeAvailChecker.isAvailable());
assertEquals(ByteBufferUtils.UNSAFE_UNALIGNED, UnsafeAvailChecker.unaligned());
assertEquals(ByteBufferUtils.UNSAFE_AVAIL, HBasePlatformDependent.isUnsafeAvailable());
assertEquals(ByteBufferUtils.UNSAFE_UNALIGNED, HBasePlatformDependent.unaligned());
}
public TestByteBufferUtils(boolean useUnsafeIfPossible) throws Exception {


@@ -37,6 +37,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.io.WritableUtils;
import org.junit.Assert;
import org.junit.ClassRule;
@@ -79,7 +80,7 @@ public class TestBytes extends TestCase {
assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
}
} finally {
setUnsafe(UnsafeAvailChecker.unaligned());
setUnsafe(HBasePlatformDependent.unaligned());
}
}


@@ -20,17 +20,17 @@ package org.apache.hadoop.hbase.http.resource;
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.DefaultValue;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ajax.JSON;
/**


@@ -28,11 +28,11 @@ import java.util.function.BiConsumer;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import org.junit.rules.ExternalResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Request;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.RequestLog;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Server;


@@ -31,14 +31,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.Callable;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.xml.ws.http.HTTPException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
@@ -53,6 +45,14 @@ import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
import org.apache.hbase.thirdparty.javax.ws.rs.client.Client;
import org.apache.hbase.thirdparty.javax.ws.rs.client.ClientBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.client.Entity;
import org.apache.hbase.thirdparty.javax.ws.rs.client.Invocation;
import org.apache.hbase.thirdparty.javax.ws.rs.client.WebTarget;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriBuilder;
import org.apache.hbase.thirdparty.org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
/**


@@ -165,10 +165,6 @@
<groupId>com.sun.activation</groupId>
<artifactId>javax.activation</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
@@ -181,6 +177,10 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-jackson-jaxrs-json-provider</artifactId>
</dependency>
<dependency>
<!-- We *might* need this for XMLStreamReader use in RemoteAdmin
TODO figure out if we can remove it.
@@ -515,15 +515,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</dependency>
<!--Needed when jdk11/hadoop3 else complaint about
NoSuchMethodError: 'java.util.Map javax.ws.rs.core.Application.getProperties()'
when REST server is started.
-->
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
<version>2.3.2</version>
</dependency>
</dependencies>
</profile>
<profile>


@@ -18,15 +18,16 @@
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class ExistsResource extends ResourceBase {
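
These REST-module diffs (HBASE-26523) only swap javax.ws.rs import prefixes for the relocated copy in hbase-thirdparty; the JAX-RS programming model is unchanged. A hypothetical minimal resource (PingResource is not part of the patch) showing the shaded imports in use:

import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;

@Path("/ping")
public class PingResource {
  @GET
  @Produces(MediaType.TEXT_PLAIN)
  public Response get() {
    // Identical to vanilla JAX-RS; only the package prefix differs.
    return Response.ok("pong").build();
  }
}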


@@ -18,12 +18,6 @@
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -34,6 +28,13 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class MultiRowResource extends ResourceBase implements Constants {
private static final Logger LOG = LoggerFactory.getLogger(MultiRowResource.class);


@@ -19,18 +19,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.Admin;
@@ -41,6 +29,19 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.POST;
import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.HttpHeaders;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
/**
* Implements the following REST end points:
* <p>


@@ -19,18 +19,19 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.servlet.ServletContext;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.rest.model.NamespacesModel;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
/**
* Implements REST GET list of all namespaces.
* <p>


@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
@@ -34,6 +32,9 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.StreamingOutput;
@InterfaceAudience.Private
public class ProtobufStreamingOutput implements StreamingOutput {
private static final Logger LOG = LoggerFactory.getLogger(ProtobufStreamingOutput.class);


@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase.rest;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.EnumSet;
@@ -48,6 +47,7 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;


@@ -19,13 +19,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -40,6 +33,14 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class RegionsResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(RegionsResource.class);


@@ -18,14 +18,15 @@
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
@InterfaceAudience.Private
public class ResourceBase implements Constants {


@@ -18,15 +18,6 @@
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.rest.model.TableListModel;
import org.apache.hadoop.hbase.rest.model.TableModel;
@@ -34,6 +25,16 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@Path("/")
@InterfaceAudience.Private
public class RootResource extends ResourceBase {


@@ -20,18 +20,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.Type;
@@ -53,6 +41,19 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.POST;
import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.HttpHeaders;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class RowResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(RowResource.class);


@@ -19,15 +19,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Base64;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -39,6 +30,16 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class ScannerInstanceResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(ScannerInstanceResource.class);


@@ -24,15 +24,6 @@ import java.net.URI;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
@@ -40,6 +31,16 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.POST;
import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class ScannerResource extends ResourceBase {


@@ -19,18 +19,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import javax.xml.namespace.QName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,6 +34,19 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.POST;
import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class SchemaResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(SchemaResource.class);


@@ -20,13 +20,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.RegionMetrics;
@@ -38,6 +31,14 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class StorageClusterStatusResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(StorageClusterStatusResource.class);


@ -19,19 +19,20 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.EnumSet;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class StorageClusterVersionResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(StorageClusterVersionResource.class);


@ -19,11 +19,6 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.List;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.Encoded;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
@ -38,6 +33,12 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.DefaultValue;
import org.apache.hbase.thirdparty.javax.ws.rs.Encoded;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
@InterfaceAudience.Private
public class TableResource extends ResourceBase {


@ -23,14 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
@ -45,6 +37,15 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.HeaderParam;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.StreamingOutput;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
@InterfaceAudience.Private
public class TableScanResource extends ResourceBase {
private static final Logger LOG = LoggerFactory.getLogger(TableScanResource.class);


@ -19,19 +19,20 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.servlet.ServletContext;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.hbase.rest.model.VersionModel;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.GET;
import org.apache.hbase.thirdparty.javax.ws.rs.Path;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
/**
* Implements REST software version reporting
* <p>


@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.rest.model;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.io.Serializable;
@ -27,7 +26,6 @@ import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@ -75,6 +73,9 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
/**
* A representation of Scanner parameters.
*


@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.rest.provider;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
@ -40,6 +38,9 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
import org.apache.hadoop.hbase.rest.model.VersionModel;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.ContextResolver;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
/**
* Plumbing for hooking up Jersey's JSON entity body encoding and decoding support to JAXB. Modify
* how the context is created (by using e.g. a different configuration builder) to control how JSON

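The resolver described above uses the standard JAX-RS ContextResolver extension point, now taken from the shaded package. A hedged sketch of that plumbing, assuming a single model class (CellModel) rather than the full list the real resolver registers:

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.ContextResolver;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;

@Provider
public class SingleModelContextResolver implements ContextResolver<JAXBContext> {
  private final JAXBContext context;

  public SingleModelContextResolver() throws JAXBException {
    // The real resolver builds the context from all REST model classes;
    // one class is enough to show the mechanism.
    this.context = JAXBContext.newInstance(CellModel.class);
  }

  @Override
  public JAXBContext getContext(Class<?> type) {
    // Jersey calls this whenever it needs a JAXBContext for
    // (de)serializing a response or request entity.
    return context;
  }
}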

@ -23,18 +23,19 @@ import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyReader;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
/**
* Adapter for hooking up Jersey content processing dispatch to ProtobufMessageHandler interface
* capable handlers for decoding protobuf input.

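For reference, the reader side of the Jersey contract that this adapter implements: isReadable() gates on the target type and media type, then readFrom() consumes the request entity stream. A hedged sketch (the byte[]-based reader below is illustrative, not the actual HBase class, which decodes into a ProtobufMessageHandler):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyReader;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;

@Provider
@Consumes("application/x-protobuf")
public class RawBytesBodyReader implements MessageBodyReader<byte[]> {
  @Override
  public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations,
    MediaType mediaType) {
    return byte[].class.equals(type);
  }

  @Override
  public byte[] readFrom(Class<byte[]> type, Type genericType, Annotation[] annotations,
    MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
    throws IOException {
    // Drain the request entity; the real adapter hands these bytes to a
    // ProtobufMessageHandler instead of returning them raw.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    byte[] chunk = new byte[4096];
    int read;
    while ((read = entityStream.read(chunk)) != -1) {
      buffer.write(chunk, 0, read);
    }
    return buffer.toByteArray();
  }
}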

@ -21,16 +21,17 @@ import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyWriter;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
/**
* An adapter between Jersey and Object.toString(). Hooks up plain text output to the Jersey content
* handling framework. Jersey will first call getSize() to learn the number of bytes that will be

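The writer contract works in the opposite direction, and the protobuf producer in the next file implements the same three-step sequence: Jersey calls isWriteable(), then getSize(), then writeTo(). A hedged sketch of a toString()-style writer (illustrative only, not the shipped class):

import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyWriter;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;

@Provider
@Produces(MediaType.TEXT_PLAIN)
public class PlainTextWriterSketch implements MessageBodyWriter<Object> {
  @Override
  public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
    MediaType mediaType) {
    return true;
  }

  @Override
  public long getSize(Object value, Class<?> type, Type genericType, Annotation[] annotations,
    MediaType mediaType) {
    return -1; // -1 signals the length is not known up front
  }

  @Override
  public void writeTo(Object value, Class<?> type, Type genericType, Annotation[] annotations,
    MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream out)
    throws IOException {
    // Hook plain-text output into Jersey by rendering via toString().
    out.write(value.toString().getBytes(StandardCharsets.UTF_8));
  }
}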

@ -21,16 +21,17 @@ import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyWriter;
import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
/**
* An adapter between Jersey and ProtobufMessageHandler implementors. Hooks up protobuf output
* producing methods to the Jersey content handling framework. Jersey will first call getSize() to


@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.rest;
import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
@ -49,6 +47,9 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
public class RowResourceBase {
protected static final String TABLE = "TestRowResource";


@ -20,10 +20,8 @@ package org.apache.hadoop.hbase.rest;
import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.IOException;
import java.util.Collection;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
@ -54,6 +52,9 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
@Category({ RestTests.class, MediumTests.class })
@RunWith(Parameterized.class)
public class TestMultiRowResource {


@ -23,7 +23,6 @@ import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
@ -32,7 +31,6 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.hadoop.conf.Configuration;
@ -61,6 +59,9 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
@Category({ RestTests.class, MediumTests.class })
public class TestNamespacesInstanceResource {
@ClassRule


@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.rest;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.lang.reflect.Method;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import java.util.Optional;
@ -39,7 +40,8 @@ import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import sun.security.x509.AlgorithmId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({ RestTests.class, MediumTests.class })
public class TestRESTServerSSL {
@ -48,6 +50,8 @@ public class TestRESTServerSSL {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestRESTServerSSL.class);
private static final Logger LOG = LoggerFactory.getLogger(TestRESTServerSSL.class);
private static final String KEY_STORE_PASSWORD = "myKSPassword";
private static final String TRUST_STORE_PASSWORD = "myTSPassword";
@ -57,12 +61,23 @@ public class TestRESTServerSSL {
private static File keyDir;
private Configuration conf;
@BeforeClass
public static void beforeClass() throws Exception {
// Workaround for jdk8 252 bug. See https://github.com/bcgit/bc-java/issues/941
// Workaround for jdk8 292 bug. See https://github.com/bcgit/bc-java/issues/941
// Below is a workaround described in above URL. Issue fingered first in comments in
// HBASE-25920 Support Hadoop 3.3.1
AlgorithmId.get("PBEWithSHA1AndDESede");
private static void initializeAlgorithmId() {
try {
Class<?> algoId = Class.forName("sun.security.x509.AlgorithmId");
Method method = algoId.getMethod("get", String.class);
method.setAccessible(true);
method.invoke(null, "PBEWithSHA1AndDESede");
} catch (Exception e) {
LOG.warn("failed to initialize AlgorithmId", e);
}
}
@BeforeClass
public static void beforeClass() throws Exception {
initializeAlgorithmId();
keyDir = initKeystoreDir();
KeyPair keyPair = KeyStoreTestUtil.generateKeyPair("RSA");
X509Certificate serverCertificate =


@ -22,14 +22,12 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import javax.ws.rs.core.MediaType;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@ -96,6 +94,9 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
/**
* Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
* HttpComponents to verify that a simple Servlet is reachable via SPNEGO and unreachable w/o.


@ -26,7 +26,6 @@ import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
@ -37,7 +36,6 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
@ -73,6 +71,9 @@ import org.junit.experimental.categories.Category;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
@Category({ RestTests.class, MediumTests.class })
public class TestTableScan {
@ClassRule


@ -22,10 +22,8 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@ -46,6 +44,9 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
@Category({ RestTests.class, MediumTests.class })
public class TestVersionResource {


@ -21,18 +21,19 @@ import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Base64;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.provider.JAXBContextResolver;
import org.junit.Test;
import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
public abstract class TestModelBase<T> {
protected String AS_XML;


@ -203,41 +203,37 @@ public class MetaTableMetrics implements RegionCoprocessor {
return "";
}
MetaTableOps ops = opsNameMap.get(op.getClass());
String opWithClientMeterName = "";
if (ops == null) {
return "";
}
switch (ops) {
case GET:
opWithClientMeterName = String.format("MetaTable_client_%s_get_request", clientIP);
break;
return String.format("MetaTable_client_%s_get_request", clientIP);
case PUT:
opWithClientMeterName = String.format("MetaTable_client_%s_put_request", clientIP);
break;
return String.format("MetaTable_client_%s_put_request", clientIP);
case DELETE:
opWithClientMeterName = String.format("MetaTable_client_%s_delete_request", clientIP);
break;
return String.format("MetaTable_client_%s_delete_request", clientIP);
default:
break;
return "";
}
return opWithClientMeterName;
}
private String opMeterName(Object op) {
// Extract meter name containing the access type
MetaTableOps ops = opsNameMap.get(op.getClass());
String opMeterName = "";
if (ops == null) {
return "";
}
switch (ops) {
case GET:
opMeterName = "MetaTable_get_request";
break;
return "MetaTable_get_request";
case PUT:
opMeterName = "MetaTable_put_request";
break;
return "MetaTable_put_request";
case DELETE:
opMeterName = "MetaTable_delete_request";
break;
return "MetaTable_delete_request";
default:
break;
return "";
}
return opMeterName;
}
private String tableMeterName(String tableName) {


@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.hfile;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
/**
@ -46,8 +47,12 @@ public class LruCachedBlockQueue implements HeapSize {
* @param blockSize expected average size of blocks
*/
public LruCachedBlockQueue(long maxSize, long blockSize) {
Preconditions.checkArgument(blockSize > 0, "negative blockSize %s", blockSize);
Preconditions.checkArgument(maxSize > 0, "negative maxSize %s", maxSize);
int initialSize = (int) (maxSize / blockSize);
if (initialSize == 0) initialSize++;
if (initialSize == 0) {
initialSize++;
}
queue = MinMaxPriorityQueue.expectedSize(initialSize).create();
heapSize = 0;
this.maxSize = maxSize;
@ -61,6 +66,10 @@ public class LruCachedBlockQueue implements HeapSize {
* side effect of this call.
* @param cb block to try to add to the queue
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE",
justification = "head can not be null as heapSize is greater than maxSize,"
+ " which means we have something in the queue")
public void add(LruCachedBlock cb) {
if (heapSize < maxSize) {
queue.add(cb);

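The two new checkArgument guards fail fast on non-positive sizes; before the patch, a zero blockSize only surfaced later as an ArithmeticException from the division maxSize / blockSize. The same pair of guards is added to CachedEntryQueue in the next file. A small hypothetical demonstration (LruCachedBlockQueue is an internal class, so this is for illustration only):

import org.apache.hadoop.hbase.io.hfile.LruCachedBlockQueue;

public class QueueGuardDemo {
  public static void main(String[] args) {
    // With the guard in place this throws
    // IllegalArgumentException: negative blockSize 0
    // instead of a divide-by-zero deeper in the constructor.
    new LruCachedBlockQueue(1024 * 1024, 0);
  }
}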

@ -22,6 +22,7 @@ import java.util.Map;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
/**
@ -49,6 +50,8 @@ public class CachedEntryQueue {
* @param blockSize expected average size of blocks
*/
public CachedEntryQueue(long maxSize, long blockSize) {
Preconditions.checkArgument(blockSize > 0, "negative blockSize %s", blockSize);
Preconditions.checkArgument(maxSize > 0, "negative maxSize %s", maxSize);
int initialSize = (int) (maxSize / blockSize);
if (initialSize == 0) {
initialSize++;
@ -66,6 +69,10 @@ public class CachedEntryQueue {
* side effect of this call.
* @param entry a bucket entry with key to try to add to the queue
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE",
justification = "head can not be null as cacheSize is greater than maxSize,"
+ " which means we have something in the queue")
public void add(Map.Entry<BlockCacheKey, BucketEntry> entry) {
if (cacheSize < maxSize) {
queue.add(entry);


@ -156,6 +156,7 @@ import org.apache.hadoop.hbase.security.access.AccessChecker;
import org.apache.hadoop.hbase.security.access.ZKPermissionWatcher;
import org.apache.hadoop.hbase.trace.SpanReceiverHost;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@ -192,7 +193,6 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.misc.Signal;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@ -735,7 +735,7 @@ public class HRegionServer extends Thread
*/
private static void setupWindows(final Configuration conf, ConfigurationManager cm) {
if (!SystemUtils.IS_OS_WINDOWS) {
Signal.handle(new Signal("HUP"), signal -> {
HBasePlatformDependent.handle("HUP", (number, name) -> {
conf.reloadConfiguration();
cm.notifyAllObservers(conf);
});
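
HBasePlatformDependent.handle wraps the JDK-internal signal API behind the new hbase-unsafe module, so HRegionServer no longer imports sun.misc.Signal directly. A hedged sketch of the call as used above (the demo class and its main method are assumptions; the handle() signature is taken from the patch):

import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;

public class SighupDemo {
  public static void main(String[] args) throws InterruptedException {
    // Register a handler for SIGHUP; the callback receives the signal
    // number and name. Signals are a POSIX facility, hence the
    // SystemUtils.IS_OS_WINDOWS guard in the surrounding code.
    HBasePlatformDependent.handle("HUP", (number, name) ->
      System.out.println("caught " + name + " (" + number + ")"));
    Thread.sleep(Long.MAX_VALUE); // keep the JVM alive; test with `kill -HUP <pid>`
  }
}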

pom.xml

@ -535,6 +535,7 @@
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm</maven.build.timestamp.format>
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>1.8</compileSource>
<releaseTarget>8</releaseTarget>
<!-- Build dependencies -->
<maven.min.version>3.0.4</maven.min.version>
<java.min.version>${compileSource}</java.min.version>
@ -563,8 +564,8 @@
<httpclient.version>4.5.13</httpclient.version>
<httpcore.version>4.4.13</httpcore.version>
<metrics-core.version>3.2.6</metrics-core.version>
<jackson.version>2.10.1</jackson.version>
<jackson.databind.version>2.10.1</jackson.databind.version>
<jackson.version>2.13.1</jackson.version>
<jackson.databind.version>2.13.1</jackson.databind.version>
<jaxb-api.version>2.3.1</jaxb-api.version>
<servlet.api.version>3.1.0</servlet.api.version>
<wx.rs.api.version>2.1.1</wx.rs.api.version>
@ -624,8 +625,8 @@
<wagon.ssh.version>2.12</wagon.ssh.version>
<xml.maven.version>1.0.1</xml.maven.version>
<spotless.version>2.22.2</spotless.version>
<hbase-thirdparty.version>4.1.0</hbase-thirdparty.version>
<maven-site.version>3.12.0</maven-site.version>
<hbase-thirdparty.version>3.5.1</hbase-thirdparty.version>
<!-- Intraproject jar naming properties -->
<!-- TODO this is pretty ugly, but works for the moment.
Modules are pretty heavy-weight things, so doing this work isn't too bad. -->
@ -1166,11 +1167,6 @@
<artifactId>joni</artifactId>
<version>${joni.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
@ -1390,6 +1386,16 @@
<artifactId>hbase-shaded-jersey</artifactId>
<version>${hbase-thirdparty.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-jackson-jaxrs-json-provider</artifactId>
<version>${hbase-thirdparty.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-unsafe</artifactId>
<version>${hbase-thirdparty.version}</version>
</dependency>
<dependency>
<groupId>com.sun.xml.ws</groupId>
<artifactId>jaxws-ri</artifactId>
@ -2071,6 +2077,22 @@
<bannedImport>org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting</bannedImport>
</bannedImports>
</restrictImports>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<commentLineBufferSize>512</commentLineBufferSize>
<reason>Use shaded javax.ws.rs in hbase-thirdparty</reason>
<bannedImports>
<bannedImport>javax.ws.rs.**</bannedImport>
</bannedImports>
</restrictImports>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<commentLineBufferSize>512</commentLineBufferSize>
<reason>Use shaded jackson-jaxrs-json-provider in hbase-thirdparty</reason>
<bannedImports>
<bannedImport>com.fasterxml.jackson.jaxrs.**</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
@ -2597,6 +2619,7 @@
<jdk>[1.11,)</jdk>
</activation>
<properties>
<maven.compiler.release>${releaseTarget}</maven.compiler.release>
<!-- TODO: replicate logic for windows support -->
<argLine>--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED ${hbase-surefire.argLine}</argLine>
<!-- We need a minimum HDFS version of 3.2.0 for HADOOP-12760 -->