mirror of https://github.com/apache/nifi.git
NIFI-5062: Removed hbase-client dependency from hbase bundle
This closes #2636
Signed-off-by: Mike Thomsen <mikerthomsen@gmail.com>
This commit is contained in:
parent fc902234b6
commit 1dbfcb9445
@@ -78,18 +78,6 @@
             <artifactId>mockito-all</artifactId>
             <scope>test</scope>
         </dependency>
-
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-            </exclusions>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-mock-record-utils</artifactId>
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.hbase;
 
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.hbase.put.PutColumn;
 import org.apache.nifi.hbase.put.PutFlowFile;
@@ -262,7 +261,7 @@ public class MockHBaseClientService extends AbstractControllerService implements
 
     @Override
     public byte[] toBytesBinary(String s) {
-        return Bytes.toBytesBinary(s);
+        return convertToBytesBinary(s);
     }
 
     private boolean testFailure = false;
@@ -290,4 +289,36 @@ public class MockHBaseClientService extends AbstractControllerService implements
     public void setLinesBeforeException(int linesBeforeException) {
         this.linesBeforeException = linesBeforeException;
     }
+
+    private byte[] convertToBytesBinary(String in) {
+        byte[] b = new byte[in.length()];
+        int size = 0;
+
+        for (int i = 0; i < in.length(); ++i) {
+            char ch = in.charAt(i);
+            if (ch == '\\' && in.length() > i + 1 && in.charAt(i + 1) == 'x') {
+                char hd1 = in.charAt(i + 2);
+                char hd2 = in.charAt(i + 3);
+                if (isHexDigit(hd1) && isHexDigit(hd2)) {
+                    byte d = (byte) ((toBinaryFromHex((byte) hd1) << 4) + toBinaryFromHex((byte) hd2));
+                    b[size++] = d;
+                    i += 3;
+                }
+            } else {
+                b[size++] = (byte) ch;
+            }
+        }
+
+        byte[] b2 = new byte[size];
+        System.arraycopy(b, 0, b2, 0, size);
+        return b2;
+    }
+
+    private static boolean isHexDigit(char c) {
+        return c >= 'A' && c <= 'F' || c >= '0' && c <= '9';
+    }
+
+    private static byte toBinaryFromHex(byte ch) {
+        return ch >= 65 && ch <= 70 ? (byte) (10 + (byte) (ch - 65)) : (byte) (ch - 48);
+    }
 }
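
For context, here is a minimal standalone sketch of the escape decoding that the new convertToBytesBinary method performs (the BytesBinaryDemo class and its helper names are hypothetical, not part of this commit): a "\xNN" sequence with two uppercase hex digits becomes a single byte, and every other character is copied through as one byte, mirroring the Bytes.toBytesBinary call it replaces in the mock service.

import java.util.Arrays;

// Hypothetical demo; the logic mirrors the convertToBytesBinary method added above.
public class BytesBinaryDemo {

    // Decodes a string where "\xNN" (two uppercase hex digits) encodes one byte
    // and every other character is copied as a single byte.
    static byte[] toBytesBinary(String in) {
        byte[] b = new byte[in.length()];
        int size = 0;
        for (int i = 0; i < in.length(); i++) {
            char ch = in.charAt(i);
            if (ch == '\\' && in.length() > i + 1 && in.charAt(i + 1) == 'x') {
                char hd1 = in.charAt(i + 2);
                char hd2 = in.charAt(i + 3);
                if (isHexDigit(hd1) && isHexDigit(hd2)) {
                    b[size++] = (byte) ((hexValue(hd1) << 4) + hexValue(hd2));
                    i += 3;
                }
            } else {
                b[size++] = (byte) ch;
            }
        }
        return Arrays.copyOf(b, size);
    }

    static boolean isHexDigit(char c) {
        return (c >= 'A' && c <= 'F') || (c >= '0' && c <= '9');
    }

    static int hexValue(char c) {
        return c >= 'A' ? 10 + (c - 'A') : c - '0';
    }

    public static void main(String[] args) {
        // The literal below contains a real backslash: \x01 decodes to byte 0x01,
        // while 'a', 'b', 'c' are copied as their byte values.
        System.out.println(Arrays.toString(toBytesBinary("\\x01abc"))); // [1, 97, 98, 99]
    }
}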
@@ -17,9 +17,9 @@
 
 package org.apache.nifi.hbase;
 
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.nifi.hbase.put.PutColumn;
 import org.apache.nifi.hbase.put.PutFlowFile;
+import org.apache.nifi.hbase.util.Bytes;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.serialization.record.MockRecordParser;
 import org.apache.nifi.serialization.record.RecordFieldType;
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.hbase.util;
+
+import sun.misc.Unsafe;
+
+import java.lang.reflect.Field;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+
+public class Bytes {
+
+    public static String toString(byte[] b) {
+        return b == null ? null : toString(b, 0, b.length);
+    }
+
+    public static String toString(byte[] b1, String sep, byte[] b2) {
+        return toString(b1, 0, b1.length) + sep + toString(b2, 0, b2.length);
+    }
+
+    public static String toString(byte[] b, int off, int len) {
+        if (b == null) {
+            return null;
+        } else {
+            return len == 0 ? "" : new String(b, off, len, Charset.forName("UTF-8"));
+        }
+    }
+
+    public static long toLong(byte[] bytes) {
+        return toLong(bytes, 0, 8);
+    }
+
+    private static long toLong(byte[] bytes, int offset, int length) {
+        if (length == 8 && offset + length <= bytes.length) {
+            if (theUnsafe != null) {
+                return toLongUnsafe(bytes, offset);
+            } else {
+                long l = 0L;
+
+                for (int i = offset; i < offset + length; ++i) {
+                    l <<= 8;
+                    l ^= (long) (bytes[i] & 255);
+                }
+
+                return l;
+            }
+        } else {
+            throw explainWrongLengthOrOffset(bytes, offset, length, 8);
+        }
+    }
+
+    private static long toLongUnsafe(byte[] bytes, int offset) {
+        final boolean littleEndian = ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);
+        final int BYTE_ARRAY_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
+
+        if (littleEndian) {
+            return Long.reverseBytes(theUnsafe.getLong(bytes,
+                    (long) offset + BYTE_ARRAY_BASE_OFFSET));
+        } else {
+            return theUnsafe.getLong(bytes,
+                    (long) offset + BYTE_ARRAY_BASE_OFFSET);
+        }
+    }
+
+    private static IllegalArgumentException explainWrongLengthOrOffset(byte[] bytes, int offset, int length, int expectedLength) {
+        String reason;
+        if (length != expectedLength) {
+            reason = "Wrong length: " + length + ", expected " + expectedLength;
+        } else {
+            reason = "offset (" + offset + ") + length (" + length + ") exceed the capacity of the array: " + bytes.length;
+        }
+
+        return new IllegalArgumentException(reason);
+    }
+
+    private static final Unsafe theUnsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
+        public Object run() {
+            try {
+                Field f = Unsafe.class.getDeclaredField("theUnsafe");
+                f.setAccessible(true);
+                return f.get((Object) null);
+            } catch (NoSuchFieldException | IllegalAccessException var2) {
+                throw new Error();
+            }
+        }
+    });
+
+}
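
As a quick sanity check (a hypothetical snippet, not part of this commit): the copied Bytes.toLong reads exactly eight bytes in big-endian order, so on a valid input it should agree with java.nio.ByteBuffer.getLong, while shorter arrays raise the IllegalArgumentException built by explainWrongLengthOrOffset.

import java.nio.ByteBuffer;

// Hypothetical check: Bytes.toLong decodes 8 bytes big-endian, so it should
// produce the same value as ByteBuffer.getLong on the same array.
public class ToLongCheck {
    public static void main(String[] args) {
        byte[] eight = {0, 0, 0, 0, 0, 0, 1, 2}; // 0x0000000000000102 == 258
        System.out.println(ByteBuffer.wrap(eight).getLong()); // 258
        // org.apache.nifi.hbase.util.Bytes.toLong(eight) is expected to return the same 258;
        // an array shorter than 8 bytes is expected to throw IllegalArgumentException.
    }
}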
@@ -37,11 +37,6 @@
                 <artifactId>nifi-hbase-processors</artifactId>
                 <version>1.7.0-SNAPSHOT</version>
             </dependency>
-            <dependency>
-                <groupId>org.apache.hbase</groupId>
-                <artifactId>hbase-client</artifactId>
-                <version>1.1.2</version>
-            </dependency>
         </dependencies>
     </dependencyManagement>
 </project>
@@ -24,6 +24,9 @@
 
     <artifactId>nifi-hbase_1_1_2-client-service</artifactId>
     <packaging>jar</packaging>
+    <properties>
+        <hbase.version>1.1.2</hbase.version>
+    </properties>
     <dependencies>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -70,7 +73,7 @@
         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
-            <version>1.1.2</version>
+            <version>${hbase.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>