From 15ed7482861ace64d09c8bc6fa9f656feeec2a1f Mon Sep 17 00:00:00 2001
From: Peter Somogyi
Date: Wed, 20 Dec 2017 21:17:52 +0100
Subject: [PATCH] HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai
---
 .../hadoop/hbase/client/HBaseAdmin.java       |  7 +-
 .../hadoop/hbase/security/SaslUtil.java       |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java   |  8 +-
 .../hadoop/hbase/client/TestAsyncProcess.java | 13 ++--
 .../hbase/client/TestClientScanner.java       | 26 +++----
 .../hbase/client/TestDelayingRunner.java      |  9 ++-
 .../hadoop/hbase/client/TestOperation.java    | 76 +++++++------------
 .../client/TestSimpleRequestController.java   |  7 +-
 .../security/TestHBaseSaslRpcClient.java      | 27 +++----
 .../org/apache/hadoop/hbase/util/Base64.java  |  2 +-
 .../org/apache/hadoop/hbase/TestCellUtil.java | 31 ++++----
 .../apache/hadoop/hbase/TestTableName.java    |  7 +-
 .../io/crypto/TestKeyStoreKeyProvider.java    |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java | 14 +---
 .../hbase/util/TestLoadTestKVGenerator.java   |  9 +--
 .../hadoop/hbase/util/TestOrderedBytes.java   | 33 ++++----
 .../client/example/HttpProxyExample.java      |  3 +-
 .../hbase/mapreduce/HFileOutputFormat2.java   | 13 ++--
 18 files changed, 120 insertions(+), 172 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index af3916d2e3b..63310e668e2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -23,7 +23,6 @@ import com.google.protobuf.RpcController;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -1675,8 +1674,8 @@ public class HBaseAdmin implements Admin {
     byte[][] encodedNameofRegionsToMerge = new byte[nameofRegionsToMerge.length][];
     for(int i = 0; i < nameofRegionsToMerge.length; i++) {
       encodedNameofRegionsToMerge[i] = HRegionInfo.isEncodedRegionName(nameofRegionsToMerge[i]) ?
-        nameofRegionsToMerge[i] : HRegionInfo.encodeRegionName(nameofRegionsToMerge[i])
-          .getBytes(StandardCharsets.UTF_8);
+        nameofRegionsToMerge[i] :
+          Bytes.toBytes(HRegionInfo.encodeRegionName(nameofRegionsToMerge[i]));
     }
 
     TableName tableName = null;
@@ -1774,7 +1773,7 @@ public class HBaseAdmin implements Admin {
   public Future<Void> splitRegionAsync(byte[] regionName, byte[] splitPoint)
       throws IOException {
     byte[] encodedNameofRegionToSplit = HRegionInfo.isEncodedRegionName(regionName) ?
-      regionName : HRegionInfo.encodeRegionName(regionName).getBytes(StandardCharsets.UTF_8);
+      regionName : Bytes.toBytes(HRegionInfo.encodeRegionName(regionName));
     Pair<HRegionInfo, ServerName> pair = getRegion(regionName);
     if (pair != null) {
       if (pair.getFirst() != null &&
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index d37abdf72a8..7091df5b882 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -28,6 +28,7 @@ import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,7 +74,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+    return Base64.decodeBase64(Bytes.toBytes(identifier));
   }
 
   static char[] encodePassword(byte[] password) {
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index 976dfad4a35..5733c86c5d6 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -20,19 +20,17 @@ package org.apache.hadoop.hbase;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import java.nio.charset.StandardCharsets;
-
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.BuilderStyleTest;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -100,7 +98,7 @@ public class TestHColumnDescriptor {
   public void testHColumnDescriptorShouldThrowIAEWhenFamilyNameEmpty() throws Exception {
     expectedEx.expect(IllegalArgumentException.class);
     expectedEx.expectMessage("Column Family name can not be empty");
-    new HColumnDescriptor("".getBytes(StandardCharsets.UTF_8));
+    new HColumnDescriptor(Bytes.toBytes(""));
   }
 
   /**
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index dd2ac6ff6ea..ba6756b670d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -88,10 +87,10 @@ public class TestAsyncProcess {
   private static final Logger LOG = LoggerFactory.getLogger(TestAsyncProcess.class);
 
   private static final TableName DUMMY_TABLE = TableName.valueOf("DUMMY_TABLE");
-  private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] FAILS = "FAILS".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
+  private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
+  private static final byte[] DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
+  private static final byte[] FAILS = Bytes.toBytes("FAILS");
   private static final Configuration CONF = new Configuration();
   private static final ConnectionConfiguration CONNECTION_CONFIG =
       new ConnectionConfiguration(CONF);
@@ -987,7 +986,7 @@ public class TestAsyncProcess {
 
     for (int i = 0; i < 1000; i++) {
-      ap.incTaskCounters(Collections.singleton("dummy".getBytes(StandardCharsets.UTF_8)), sn);
+      ap.incTaskCounters(Collections.singleton(Bytes.toBytes("dummy")), sn);
     }
 
     final Thread myThread = Thread.currentThread();
@@ -1018,7 +1017,7 @@ public class TestAsyncProcess {
       public void run() {
         Threads.sleep(sleepTime);
         while (controller.tasksInProgress.get() > 0) {
-          ap.decTaskCounters(Collections.singleton("dummy".getBytes(StandardCharsets.UTF_8)), sn);
+          ap.decTaskCounters(Collections.singleton(Bytes.toBytes("dummy")), sn);
         }
       }
     };
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
index eb158cb9fa2..41d92222b4f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
@@ -30,7 +30,6 @@ import static org.mockito.Matchers.anyInt;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -132,8 +132,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testNoResultsHint() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -193,8 +192,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testSizeLimit() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -251,14 +249,11 @@ public class TestClientScanner {
   @Test
   @SuppressWarnings("unchecked")
   public void testCacheLimit() throws IOException {
-    KeyValue kv1 = new KeyValue("row1".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
-    KeyValue kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
-    KeyValue kv3 = new KeyValue("row3".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv3 = new KeyValue(Bytes.toBytes("row3"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     final Result[] results = new Result[] {Result.create(new Cell[] {kv1}),
         Result.create(new Cell[] {kv2}), Result.create(new Cell[] {kv3})};
@@ -331,8 +326,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testNoMoreResults() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -390,14 +384,12 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testMoreResults() throws IOException {
     final Result[] results1 = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results1[0] = Result.create(new Cell[] {kv1});
 
     final Result[] results2 = new Result[1];
-    KeyValue kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"), Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results2[0] = Result.create(new Cell[] {kv2});
 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
index 15b7b022a50..b8400538f01 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-import java.nio.charset.StandardCharsets;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -34,8 +35,8 @@ public class TestDelayingRunner {
 
   private static final TableName DUMMY_TABLE =
       TableName.valueOf("DUMMY_TABLE");
-  private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
+  private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
   private static HRegionInfo hri1 =
       new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 98bc74bf9ba..cf40a690938 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -21,9 +21,10 @@ package org.apache.hadoop.hbase.client;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -66,7 +67,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import com.fasterxml.jackson.databind.ObjectMapper;
 
 /**
  * Run tests that use the functionality of the Operation superclass for
@@ -83,66 +83,53 @@ public class TestOperation {
 
   private static List<Long> TS_LIST = Arrays.asList(2L, 3L, 5L);
   private static TimestampsFilter TS_FILTER = new TimestampsFilter(TS_LIST);
-  private static String STR_TS_FILTER =
-      TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
+  private static String STR_TS_FILTER = TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
 
-  private static List<Long> L_TS_LIST =
-      Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
-  private static TimestampsFilter L_TS_FILTER =
-      new TimestampsFilter(L_TS_LIST);
+  private static List<Long> L_TS_LIST = Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
+  private static TimestampsFilter L_TS_FILTER = new TimestampsFilter(L_TS_LIST);
   private static String STR_L_TS_FILTER =
       L_TS_FILTER.getClass().getSimpleName() + " (5/11): [0, 1, 2, 3, 4]";
 
   private static String COL_NAME_1 = "col1";
   private static ColumnPrefixFilter COL_PRE_FILTER =
-      new ColumnPrefixFilter(COL_NAME_1.getBytes(StandardCharsets.UTF_8));
+      new ColumnPrefixFilter(Bytes.toBytes(COL_NAME_1));
   private static String STR_COL_PRE_FILTER =
       COL_PRE_FILTER.getClass().getSimpleName() + " " + COL_NAME_1;
 
   private static String COL_NAME_2 = "col2";
-  private static ColumnRangeFilter CR_FILTER = new ColumnRangeFilter(
-      COL_NAME_1.getBytes(StandardCharsets.UTF_8), true,
-      COL_NAME_2.getBytes(StandardCharsets.UTF_8), false);
+  private static ColumnRangeFilter CR_FILTER =
+      new ColumnRangeFilter(Bytes.toBytes(COL_NAME_1), true, Bytes.toBytes(COL_NAME_2), false);
   private static String STR_CR_FILTER = CR_FILTER.getClass().getSimpleName()
       + " [" + COL_NAME_1 + ", " + COL_NAME_2 + ")";
 
   private static int COL_COUNT = 9;
-  private static ColumnCountGetFilter CCG_FILTER =
-      new ColumnCountGetFilter(COL_COUNT);
-  private static String STR_CCG_FILTER =
-      CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
+  private static ColumnCountGetFilter CCG_FILTER = new ColumnCountGetFilter(COL_COUNT);
+  private static String STR_CCG_FILTER = CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
 
   private static int LIMIT = 3;
   private static int OFFSET = 4;
-  private static ColumnPaginationFilter CP_FILTER =
-      new ColumnPaginationFilter(LIMIT, OFFSET);
+  private static ColumnPaginationFilter CP_FILTER = new ColumnPaginationFilter(LIMIT, OFFSET);
   private static String STR_CP_FILTER = CP_FILTER.getClass().getSimpleName()
       + " (" + LIMIT + ", " + OFFSET + ")";
 
   private static String STOP_ROW_KEY = "stop";
   private static InclusiveStopFilter IS_FILTER =
-      new InclusiveStopFilter(STOP_ROW_KEY.getBytes(StandardCharsets.UTF_8));
+      new InclusiveStopFilter(Bytes.toBytes(STOP_ROW_KEY));
   private static String STR_IS_FILTER =
       IS_FILTER.getClass().getSimpleName() + " " + STOP_ROW_KEY;
 
   private static String PREFIX = "prefix";
-  private static PrefixFilter PREFIX_FILTER =
-      new PrefixFilter(PREFIX.getBytes(StandardCharsets.UTF_8));
+  private static PrefixFilter PREFIX_FILTER = new PrefixFilter(Bytes.toBytes(PREFIX));
   private static String STR_PREFIX_FILTER = "PrefixFilter " + PREFIX;
 
-  private static byte[][] PREFIXES = {
-      "0".getBytes(StandardCharsets.UTF_8), "1".getBytes(StandardCharsets.UTF_8),
-      "2".getBytes(StandardCharsets.UTF_8)};
-  private static MultipleColumnPrefixFilter MCP_FILTER =
-      new MultipleColumnPrefixFilter(PREFIXES);
+  private static byte[][] PREFIXES = { Bytes.toBytes("0"), Bytes.toBytes("1"), Bytes.toBytes("2") };
+  private static MultipleColumnPrefixFilter MCP_FILTER = new MultipleColumnPrefixFilter(PREFIXES);
   private static String STR_MCP_FILTER =
       MCP_FILTER.getClass().getSimpleName() + " (3/3): [0, 1, 2]";
 
   private static byte[][] L_PREFIXES = {
-      "0".getBytes(StandardCharsets.UTF_8), "1".getBytes(StandardCharsets.UTF_8),
-      "2".getBytes(StandardCharsets.UTF_8), "3".getBytes(StandardCharsets.UTF_8),
-      "4".getBytes(StandardCharsets.UTF_8), "5".getBytes(StandardCharsets.UTF_8),
-      "6".getBytes(StandardCharsets.UTF_8), "7".getBytes(StandardCharsets.UTF_8)};
+      Bytes.toBytes("0"), Bytes.toBytes("1"), Bytes.toBytes("2"), Bytes.toBytes("3"),
+      Bytes.toBytes("4"), Bytes.toBytes("5"), Bytes.toBytes("6"), Bytes.toBytes("7") };
   private static MultipleColumnPrefixFilter L_MCP_FILTER =
       new MultipleColumnPrefixFilter(L_PREFIXES);
   private static String STR_L_MCP_FILTER =
@@ -150,29 +137,25 @@ public class TestOperation {
 
   private static int PAGE_SIZE = 9;
   private static PageFilter PAGE_FILTER = new PageFilter(PAGE_SIZE);
-  private static String STR_PAGE_FILTER =
-      PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
+  private static String STR_PAGE_FILTER = PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
 
   private static SkipFilter SKIP_FILTER = new SkipFilter(L_TS_FILTER);
   private static String STR_SKIP_FILTER =
       SKIP_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
 
-  private static WhileMatchFilter WHILE_FILTER =
-      new WhileMatchFilter(L_TS_FILTER);
+  private static WhileMatchFilter WHILE_FILTER = new WhileMatchFilter(L_TS_FILTER);
   private static String STR_WHILE_FILTER =
       WHILE_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
 
   private static KeyOnlyFilter KEY_ONLY_FILTER = new KeyOnlyFilter();
-  private static String STR_KEY_ONLY_FILTER =
-      KEY_ONLY_FILTER.getClass().getSimpleName();
+  private static String STR_KEY_ONLY_FILTER = KEY_ONLY_FILTER.getClass().getSimpleName();
 
-  private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER =
-      new FirstKeyOnlyFilter();
+  private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER = new FirstKeyOnlyFilter();
   private static String STR_FIRST_KEY_ONLY_FILTER =
       FIRST_KEY_ONLY_FILTER.getClass().getSimpleName();
 
   private static CompareOp CMP_OP = CompareOp.EQUAL;
-  private static byte[] CMP_VALUE = "value".getBytes(StandardCharsets.UTF_8);
+  private static byte[] CMP_VALUE = Bytes.toBytes("value");
   private static BinaryComparator BC = new BinaryComparator(CMP_VALUE);
   private static DependentColumnFilter DC_FILTER =
       new DependentColumnFilter(FAMILY, QUALIFIER, true, CMP_OP, BC);
@@ -185,14 +168,12 @@ public class TestOperation {
   private static String STR_FAMILY_FILTER =
       FAMILY_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
-  private static QualifierFilter QUALIFIER_FILTER =
-      new QualifierFilter(CMP_OP, BC);
+  private static QualifierFilter QUALIFIER_FILTER = new QualifierFilter(CMP_OP, BC);
   private static String STR_QUALIFIER_FILTER =
       QUALIFIER_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
   private static RowFilter ROW_FILTER = new RowFilter(CMP_OP, BC);
-  private static String STR_ROW_FILTER =
-      ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+  private static String STR_ROW_FILTER = ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
   private static ValueFilter VALUE_FILTER = new ValueFilter(CMP_OP, BC);
   private static String STR_VALUE_FILTER =
@@ -209,19 +190,16 @@ public class TestOperation {
       new SingleColumnValueExcludeFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
   private static String STR_SCVE_FILTER = String.format("%s (%s, %s, %s, %s)",
       SCVE_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
-      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
-      Bytes.toStringBinary(CMP_VALUE));
+      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(), Bytes.toStringBinary(CMP_VALUE));
 
   private static FilterList AND_FILTER_LIST = new FilterList(
-      Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
-      CR_FILTER));
+      Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER));
   private static String STR_AND_FILTER_LIST = String.format(
       "%s AND (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
       STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
 
   private static FilterList OR_FILTER_LIST = new FilterList(
-      Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
-      CR_FILTER));
+      Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER));
   private static String STR_OR_FILTER_LIST = String.format(
       "%s OR (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
       STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
index 3107aa705ea..37d5cb28277 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -53,9 +52,9 @@ public class TestSimpleRequestController {
 
   private static final TableName DUMMY_TABLE =
       TableName.valueOf("DUMMY_TABLE");
-  private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
"DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8); - private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8); + private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1"); + private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2"); + private static final byte[] DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3"); private static final ServerName SN = ServerName.valueOf("s1,1,1"); private static final ServerName SN2 = ServerName.valueOf("s2,2,2"); private static final HRegionInfo HRI1 diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 8d2d1a106f8..4b71eb321c4 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -27,10 +27,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; - import java.io.IOException; -import java.nio.charset.StandardCharsets; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; @@ -46,6 +43,7 @@ import javax.security.sasl.SaslClient; import org.apache.hadoop.hbase.security.AbstractHBaseSaslRpcClient.SaslClientCallbackHandler; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.token.Token; @@ -59,6 +57,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.mockito.Mockito; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; + @Category({SecurityTests.class, SmallTests.class}) public class TestHBaseSaslRpcClient { @@ -99,18 +99,15 @@ public class TestHBaseSaslRpcClient { @Test public void testSaslClientCallbackHandler() throws UnsupportedCallbackException { final Token token = createTokenMock(); - when(token.getIdentifier()) - .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8)); - when(token.getPassword()) - .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8)); + when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME)); + when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD)); final NameCallback nameCallback = mock(NameCallback.class); final PasswordCallback passwordCallback = mock(PasswordCallback.class); final RealmCallback realmCallback = mock(RealmCallback.class); final RealmChoiceCallback realmChoiceCallback = mock(RealmChoiceCallback.class); - Callback[] callbackArray = {nameCallback, passwordCallback, - realmCallback, realmChoiceCallback}; + Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback, realmChoiceCallback}; final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token); saslClCallbackHandler.handle(callbackArray); verify(nameCallback).setName(anyString()); @@ -121,10 +118,8 @@ public class TestHBaseSaslRpcClient { @Test public void testSaslClientCallbackHandlerWithException() { final Token token = createTokenMock(); - when(token.getIdentifier()) - 
-    when(token.getPassword())
-        .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
 
     final NameCallback nameCallback = mock(NameCallback.class);
     final PasswordCallback passwordCallback = mock(PasswordCallback.class);
     final RealmCallback realmCallback = mock(RealmCallback.class);
     final RealmChoiceCallback realmChoiceCallback = mock(RealmChoiceCallback.class);
 
-    Callback[] callbackArray = {nameCallback, passwordCallback,
-        realmCallback, realmChoiceCallback};
+    Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback, realmChoiceCallback};
     final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
     saslClCallbackHandler.handle(callbackArray);
     verify(nameCallback).setName(anyString());
@@ -121,10 +118,8 @@ public class TestHBaseSaslRpcClient {
   @Test
   public void testSaslClientCallbackHandlerWithException() {
     final Token<? extends TokenIdentifier> token = createTokenMock();
-    when(token.getIdentifier())
-        .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
-    when(token.getPassword())
-        .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
     final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
     try {
       saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });
@@ -294,10 +289,8 @@ public class TestHBaseSaslRpcClient {
       throws IOException {
     Token<? extends TokenIdentifier> token = createTokenMock();
     if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) {
-      when(token.getIdentifier())
-          .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
-      when(token.getPassword())
-          .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+      when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+      when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
     }
     return token;
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index fe74bcf91c1..f3ac52b8e2d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -928,7 +928,7 @@ public class Base64 {
       bytes = s.getBytes(PREFERRED_ENCODING);
 
     } catch (UnsupportedEncodingException uee) {
-      bytes = s.getBytes(StandardCharsets.UTF_8);
+      bytes = Bytes.toBytes(s);
     } // end catch
 
     // Decode
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index aad0929a71a..41c0fc4e69e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -26,7 +26,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableMap;
@@ -316,8 +315,8 @@ public class TestCellUtil {
   @Test
   public void testFindCommonPrefixInFlatKey() {
     // The whole key matching case
-    KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8),
-        "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(kv1.getKeyLength(),
         PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true));
     Assert.assertEquals(kv1.getKeyLength(),
@@ -325,35 +324,35 @@ public class TestCellUtil {
     Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE,
         PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false));
     // The rk length itself mismatch
-    KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8),
-        "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("r12"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(1, PrivateCellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true));
     // part of rk is same
-    KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-        "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
-    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes(StandardCharsets.UTF_8).length,
+    KeyValue kv3 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
+    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + Bytes.toBytes("r1").length,
         PrivateCellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true));
     // entire rk is same but different cf name
-    KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-        "f2".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv4 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f2"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
-        + "f".getBytes(StandardCharsets.UTF_8).length,
+        + Bytes.toBytes("f").length,
         PrivateCellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
     // rk and family are same and part of qualifier
-    KeyValue kv5 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-        "f2".getBytes(StandardCharsets.UTF_8), "q123".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv5 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f2"),
+        Bytes.toBytes("q123"), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv4.getFamilyLength() + kv4.getQualifierLength(),
         PrivateCellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true));
     // rk, cf and q are same. ts differs
-    KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L);
-    KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L);
+    KeyValue kv6 = new KeyValue(Bytes.toBytes("rk"), 1234L);
+    KeyValue kv7 = new KeyValue(Bytes.toBytes("rk"), 1235L);
     // only last byte out of 8 ts bytes in ts part differs
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + 7,
         PrivateCellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true));
     // rk, cf, q and ts are same. Only type differs
-    KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete);
+    KeyValue kv8 = new KeyValue(Bytes.toBytes("rk"), 1234L, Type.Delete);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE,
         PrivateCellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true));
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
index 3bf05c4b96f..05d6bec2c25 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertSame;
 import static org.junit.Assert.fail;
 
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -119,11 +118,11 @@ public class TestTableName extends TestWatcher {
 
     Names(String ns, String tn) {
       this.ns = ns;
-      nsb = ns.getBytes(StandardCharsets.UTF_8);
+      nsb = Bytes.toBytes(ns);
      this.tn = tn;
-      tnb = tn.getBytes(StandardCharsets.UTF_8);
+      tnb = Bytes.toBytes(tn);
       nn = this.ns + ":" + this.tn;
-      nnb = nn.getBytes(StandardCharsets.UTF_8);
+      nnb = Bytes.toBytes(nn);
     }
 
     @Override
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 2fe8a8f6d62..7037a346a23 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertNotNull;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.security.Key;
 import java.security.KeyStore;
 import java.security.MessageDigest;
@@ -32,6 +31,7 @@ import javax.crypto.spec.SecretKeySpec;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -52,7 +52,7 @@ public class TestKeyStoreKeyProvider {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8));
+    KEY = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(ALIAS));
     // Create a JKECS store containing a test secret key
     KeyStore store = KeyStore.getInstance("JCEKS");
     store.load(null, PASSWORD.toCharArray());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
index 994948b93b0..1cdc9873cf6 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
@@ -21,7 +21,6 @@ import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
 import java.lang.reflect.Constructor;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
@@ -70,15 +69,10 @@ public class TestStruct {
   };
 
   Object[][] pojo2Args = {
-      new Object[] { new byte[0], "it".getBytes(StandardCharsets.UTF_8), "was",
-          "the".getBytes(StandardCharsets.UTF_8) },
-      new Object[] { "best".getBytes(StandardCharsets.UTF_8), new byte[0], "of",
-          "times,".getBytes(StandardCharsets.UTF_8) },
-      new Object[] { "it".getBytes(StandardCharsets.UTF_8),
-          "was".getBytes(StandardCharsets.UTF_8), "",
-          "the".getBytes(StandardCharsets.UTF_8) },
-      new Object[] { "worst".getBytes(StandardCharsets.UTF_8),
-          "of".getBytes(StandardCharsets.UTF_8), "times,", new byte[0] },
+      new Object[] { new byte[0], Bytes.toBytes("it"), "was", Bytes.toBytes("the") },
+      new Object[] { Bytes.toBytes("best"), new byte[0], "of", Bytes.toBytes("times,") },
+      new Object[] { Bytes.toBytes("it"), Bytes.toBytes("was"), "", Bytes.toBytes("the") },
+      new Object[] { Bytes.toBytes("worst"), Bytes.toBytes("of"), "times,", new byte[0] },
       new Object[] { new byte[0], new byte[0], "", new byte[0] },
   };
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index 2c403423b67..242d6bef53c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
@@ -41,8 +40,8 @@ public class TestLoadTestKVGenerator {
   @Test
   public void testValueLength() {
     for (int i = 0; i < 1000; ++i) {
-      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(StandardCharsets.UTF_8),
-          String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8));
+      byte[] v = gen.generateRandomSizeValue(Bytes.toBytes(Integer.toString(i)),
+          Bytes.toBytes(String.valueOf(rand.nextInt())));
       assertTrue(MIN_LEN <= v.length);
       assertTrue(v.length <= MAX_LEN);
     }
@@ -52,8 +51,8 @@ public class TestLoadTestKVGenerator {
   public void testVerification() {
     for (int i = 0; i < 1000; ++i) {
       for (int qualIndex = 0; qualIndex < 20; ++qualIndex) {
-        byte[] qual = String.valueOf(qualIndex).getBytes(StandardCharsets.UTF_8);
-        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(StandardCharsets.UTF_8);
+        byte[] qual = Bytes.toBytes(String.valueOf(qualIndex));
+        byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
         byte[] v = gen.generateRandomSizeValue(rowKey, qual);
         assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual));
         v[0]++;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
index c62986d86d7..2107ee891b7 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 
@@ -874,9 +873,9 @@
   @Test
   public void testBlobVar() {
     byte[][] vals =
-        { "".getBytes(StandardCharsets.UTF_8),
-          "foo".getBytes(StandardCharsets.UTF_8),
-          "foobarbazbub".getBytes(StandardCharsets.UTF_8),
+        { Bytes.toBytes(""),
+          Bytes.toBytes("foo"),
+          Bytes.toBytes("foobarbazbub"),
           { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
             (byte) 0xaa, /* 7 bytes of alternating bits; testing around HBASE-9893 */ },
           { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
@@ -891,14 +890,14 @@
           { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
             (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
             (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing around HBASE-9893 */ },
-          "1".getBytes(StandardCharsets.UTF_8),
-          "22".getBytes(StandardCharsets.UTF_8),
-          "333".getBytes(StandardCharsets.UTF_8),
-          "4444".getBytes(StandardCharsets.UTF_8),
-          "55555".getBytes(StandardCharsets.UTF_8),
-          "666666".getBytes(StandardCharsets.UTF_8),
-          "7777777".getBytes(StandardCharsets.UTF_8),
-          "88888888".getBytes(StandardCharsets.UTF_8)
+          Bytes.toBytes("1"),
+          Bytes.toBytes("22"),
+          Bytes.toBytes("333"),
+          Bytes.toBytes("4444"),
+          Bytes.toBytes("55555"),
+          Bytes.toBytes("666666"),
+          Bytes.toBytes("7777777"),
+          Bytes.toBytes("88888888")
         };
 
     /*
@@ -969,9 +968,9 @@
   @Test
   public void testBlobCopy() {
     byte[][] vals =
-        { "".getBytes(StandardCharsets.UTF_8),
-          "foo".getBytes(StandardCharsets.UTF_8),
-          "foobarbazbub".getBytes(StandardCharsets.UTF_8),
+        { Bytes.toBytes(""),
+          Bytes.toBytes("foo"),
+          Bytes.toBytes("foobarbazbub"),
           { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
             (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa },
           { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
@@ -1046,9 +1045,9 @@
       byte[] a = new byte[3 + (Order.ASCENDING == ord ? 1 : 2) + 2];
       PositionedByteRange buf = new SimplePositionedMutableByteRange(a, 1,
          3 + (Order.ASCENDING == ord ? 1 : 2));
-      OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(StandardCharsets.UTF_8), 3, 3, ord);
+      OrderedBytes.encodeBlobCopy(buf, Bytes.toBytes("foobarbaz"), 3, 3, ord);
       buf.setPosition(0);
-      assertArrayEquals("bar".getBytes(StandardCharsets.UTF_8), OrderedBytes.decodeBlobCopy(buf));
+      assertArrayEquals(Bytes.toBytes("bar"), OrderedBytes.decodeBlobCopy(buf));
     }
   }
 
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
index e3686f4f2f1..c5aac8d2a79 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.client.example;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.nio.charset.StandardCharsets;
 import java.util.Optional;
 import java.util.concurrent.ExecutionException;
 
@@ -138,7 +137,7 @@ public class HttpProxyExample {
       DefaultFullHttpResponse resp;
       if (content.isPresent()) {
         ByteBuf buf =
-            ctx.alloc().buffer().writeBytes(content.get().getBytes(StandardCharsets.UTF_8));
+            ctx.alloc().buffer().writeBytes(Bytes.toBytes(content.get()));
         resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, buf);
         resp.headers().set(HttpHeaderNames.CONTENT_LENGTH, buf.readableBytes());
       } else {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 9bd05302262..4a67933f7df 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -27,7 +27,6 @@ import java.io.UnsupportedEncodingException;
 import java.net.InetSocketAddress;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -49,8 +48,8 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -91,6 +90,7 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
@@ -134,10 +134,9 @@ public class HFileOutputFormat2
     }
   }
 
-  protected static final byte[] tableSeparator = ";".getBytes(StandardCharsets.UTF_8);
+  protected static final byte[] tableSeparator = Bytes.toBytes(";");
 
-  protected static byte[] combineTableNameSuffix(byte[] tableName,
-      byte[] suffix ) {
+  protected static byte[] combineTableNameSuffix(byte[] tableName, byte[] suffix) {
    return Bytes.add(tableName, tableSeparator, suffix);
   }
 
@@ -257,7 +256,7 @@ public class HFileOutputFormat2
             "' not" + " expected");
         }
       } else {
-        tableNameBytes = writeTableNames.getBytes(StandardCharsets.UTF_8);
+        tableNameBytes = Bytes.toBytes(writeTableNames);
       }
       byte[] tableAndFamily = getTableNameSuffixedWithFamily(tableNameBytes, family);
       WriterLength wl = this.writers.get(tableAndFamily);
@@ -780,7 +779,7 @@ public class HFileOutputFormat2
         continue;
       }
       try {
-        confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(StandardCharsets.UTF_8),
+        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),
             URLDecoder.decode(familySplit[1], "UTF-8"));
       } catch (UnsupportedEncodingException e) {
         // will not happen with UTF-8 encoding
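
Note on the equivalence this patch relies on: the substitution is purely mechanical. In hbase-common, Bytes.toBytes(String) is documented to convert the string to a UTF-8 byte array, so it is a drop-in replacement for String.getBytes(StandardCharsets.UTF_8) and also drops the extra java.nio.charset.StandardCharsets import at each call site. A minimal sketch of that equivalence follows; the class name and the sample literal are illustrative only (not part of the patch), and it assumes hbase-common is on the classpath:

  import java.nio.charset.StandardCharsets;
  import java.util.Arrays;

  import org.apache.hadoop.hbase.util.Bytes;

  public class BytesToBytesEquivalence {
    public static void main(String[] args) {
      String s = "DUMMY_BYTES_1";
      // The two conversions this patch treats as interchangeable:
      byte[] viaCharset = s.getBytes(StandardCharsets.UTF_8); // replaced form
      byte[] viaBytesUtil = Bytes.toBytes(s);                 // replacement form
      // Bytes.toBytes(String) encodes with UTF-8 internally, so the
      // two arrays should match byte for byte.
      System.out.println(Arrays.equals(viaCharset, viaBytesUtil)); // expected: true
    }
  }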