HBASE-22199 Replaced UTF-8 String with StandardCharsets.UTF_8
parent d5a6433937
commit 12bcb879da
DemoClient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;

-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
@@ -42,6 +41,7 @@ import org.apache.hadoop.hbase.thrift.generated.Hbase;
 import org.apache.hadoop.hbase.thrift.generated.Mutation;
 import org.apache.hadoop.hbase.thrift.generated.TCell;
 import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TSaslClientTransport;
@@ -115,12 +115,7 @@ public class DemoClient {

   // Helper to translate strings to UTF8 bytes
   private byte[] bytes(String s) {
-    try {
-      return s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("CharSetName {} not supported", s, e);
-      return null;
-    }
+    return Bytes.toBytes(s);
   }

   private void run() throws Exception {
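The two hunks above are the heart of the commit: String.getBytes("UTF-8") forces callers to handle a checked UnsupportedEncodingException even though every JVM is required to ship UTF-8, whereas Bytes.toBytes(String) (which encodes with UTF-8 internally) throws nothing, so the whole try/catch collapses to a single return. It also removes the old null return on the unreachable failure path. The identical helper in HttpDoAsClient below gets the same treatment. A minimal standalone sketch of the equivalence; the class name and main method are illustrative, not part of the commit:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.hadoop.hbase.util.Bytes;

public class Utf8BytesSketch {
  public static void main(String[] args) {
    String s = "row-\u00e9"; // non-ASCII, so the encoding actually matters

    // Charset overload: no checked exception, UTF-8 is always present.
    byte[] viaCharset = s.getBytes(StandardCharsets.UTF_8);

    // HBase helper: UTF-8 under the hood, hence the drop-in swap.
    byte[] viaHelper = Bytes.toBytes(s);

    System.out.println(Arrays.equals(viaCharset, viaHelper)); // true
  }
}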
HttpDoAsClient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;

-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
@@ -113,12 +112,7 @@ public class HttpDoAsClient {

   // Helper to translate strings to UTF8 bytes
   private byte[] bytes(String s) {
-    try {
-      return s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("CharSetName {} not supported", s, e);
-      return null;
-    }
+    return Bytes.toBytes(s);
   }

   private void run() throws Exception {
TestHFileOutputFormat2.java
@@ -401,7 +401,7 @@ public class TestHFileOutputFormat2 {
     HFile.Reader rd =
         HFile.createReader(fs, file[0].getPath(), new CacheConfig(conf), true, conf);
     Map<byte[],byte[]> finfo = rd.loadFileInfo();
-    byte[] range = finfo.get("TIMERANGE".getBytes("UTF-8"));
+    byte[] range = finfo.get(Bytes.toBytes("TIMERANGE"));
     assertNotNull(range);

     // unmarshall and check values.
@@ -829,7 +829,7 @@ public class TestHFileOutputFormat2 {
       for (Entry<String, Algorithm> entry : familyToCompression.entrySet()) {
         assertEquals("Compression configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToCompressionMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -903,7 +903,7 @@ public class TestHFileOutputFormat2 {
       for (Entry<String, BloomType> entry : familyToBloomType.entrySet()) {
         assertEquals("BloomType configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToBloomTypeMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -976,7 +976,7 @@ public class TestHFileOutputFormat2 {
       ) {
         assertEquals("BlockSize configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToBlockSizeMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -1053,7 +1053,7 @@ public class TestHFileOutputFormat2 {
       for (Entry<String, DataBlockEncoding> entry : familyToDataBlockEncoding.entrySet()) {
         assertEquals("DataBlockEncoding configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToDataBlockEncodingMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
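A note on why get(Bytes.toBytes(...)) can succeed at all in these assertions: byte[] inherits identity-based equals and hashCode from Object, so a freshly encoded key only finds an entry when the map compares contents, e.g. a TreeMap built on Bytes.BYTES_COMPARATOR, which is presumably how these retrieved maps are constructed. A sketch of the difference (class name illustrative, only hbase-common assumed):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hbase.util.Bytes;

public class ByteArrayKeySketch {
  public static void main(String[] args) {
    // HashMap compares byte[] keys by identity, so a fresh encoding misses.
    Map<byte[], String> byIdentity = new HashMap<>();
    byIdentity.put(Bytes.toBytes("info"), "x");
    System.out.println(byIdentity.get(Bytes.toBytes("info"))); // null

    // A TreeMap ordered by content matches any equal-valued key.
    Map<byte[], String> byContent = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    byContent.put(Bytes.toBytes("info"), "x");
    System.out.println(byContent.get(Bytes.toBytes("info"))); // x
  }
}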
TsvImporterCustomTestMapper.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.mapreduce;

+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -51,7 +52,7 @@ public class TsvImporterCustomTestMapper extends TsvImporterMapper {

     // do some basic line parsing
     byte[] lineBytes = value.getBytes();
-    String[] valueTokens = new String(lineBytes, "UTF-8").split("\u001b");
+    String[] valueTokens = new String(lineBytes, StandardCharsets.UTF_8).split("\u001b");

     // create the rowKey and Put
     ImmutableBytesWritable rowKey =
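Here the fix is a straight swap to the Charset overload of the String constructor (available since Java 6), which declares no checked exception. One caveat worth flagging, though it predates and is untouched by this commit: Text.getBytes() returns the backing buffer, which can be longer than getLength() after the object is reused, so the bounded form of the constructor is the defensive way to decode it. A sketch under that assumption (class name and values illustrative):

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;

public class TextDecodeSketch {
  public static void main(String[] args) {
    Text value = new Text("alpha\u001bbeta");
    value.set("ab"); // reuse shrinks the logical length, not the buffer

    // getBytes() exposes the whole backing buffer ...
    byte[] raw = value.getBytes();

    // ... so decode only the valid region [0, getLength()).
    String s = new String(raw, 0, value.getLength(), StandardCharsets.UTF_8);
    System.out.println(s); // "ab", with no stale bytes from "alpha..."
  }
}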
RemoteHTable.java
@@ -24,6 +24,7 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;

+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -953,7 +954,7 @@ public class RemoteHTable implements Table {
    */
   private static String toURLEncodedBytes(byte[] row) {
     try {
-      return URLEncoder.encode(new String(row, "UTF-8"), "UTF-8");
+      return URLEncoder.encode(new String(row, StandardCharsets.UTF_8), "UTF-8");
     } catch (UnsupportedEncodingException e) {
       throw new IllegalStateException("URLEncoder doesn't support UTF-8", e);
     }
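The try/catch survives here because URLEncoder.encode(String, String) takes a charset name and still declares UnsupportedEncodingException; only the inner String construction could be migrated. On Java 10+ the encode(String, Charset) overload would let the catch block go too. A sketch of that variant, offered as an assumption about a possible future cleanup, not what this commit (built for Java 8) does:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class UrlEncodeSketch {
  // Java 10+ only: the Charset overload declares no checked exception.
  private static String toURLEncodedBytes(byte[] row) {
    return URLEncoder.encode(new String(row, StandardCharsets.UTF_8), StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    System.out.println(toURLEncodedBytes("row key/1".getBytes(StandardCharsets.UTF_8)));
    // prints: row+key%2F1
  }
}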
TestTableScan.java
@@ -594,10 +594,10 @@ public class TestTableScan {
       RowModel rowModel = rowModels.get(i);
       RowModel reversedRowModel = reversedRowModels.get(i);

-      assertEquals(new String(rowModel.getKey(), "UTF-8"),
-          new String(reversedRowModel.getKey(), "UTF-8"));
-      assertEquals(new String(rowModel.getCells().get(0).getValue(), "UTF-8"),
-          new String(reversedRowModel.getCells().get(0).getValue(), "UTF-8"));
+      assertEquals(new String(rowModel.getKey(), StandardCharsets.UTF_8),
+          new String(reversedRowModel.getKey(), StandardCharsets.UTF_8));
+      assertEquals(new String(rowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8),
+          new String(reversedRowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8));
     }
   }

TestAuthenticationKey.java
@@ -25,6 +25,7 @@ import javax.crypto.SecretKey;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -40,7 +41,7 @@ public class TestAuthenticationKey {
   @Test
   public void test() throws UnsupportedEncodingException {
     SecretKey secret = Mockito.mock(SecretKey.class);
-    Mockito.when(secret.getEncoded()).thenReturn("secret".getBytes("UTF-8"));
+    Mockito.when(secret.getEncoded()).thenReturn(Bytes.toBytes("secret"));

     AuthenticationKey key = new AuthenticationKey(0, 1234, secret);
     assertEquals(key.hashCode(), new AuthenticationKey(0, 1234, secret).hashCode());
@@ -55,7 +56,7 @@ public class TestAuthenticationKey {
     assertNotEquals(key, otherExpiry);

     SecretKey other = Mockito.mock(SecretKey.class);
-    Mockito.when(secret.getEncoded()).thenReturn("other".getBytes("UTF-8"));
+    Mockito.when(secret.getEncoded()).thenReturn(Bytes.toBytes("other"));

     AuthenticationKey otherSecret = new AuthenticationKey(0, 1234, other);
     assertNotEquals(key.hashCode(), otherSecret.hashCode());
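One leftover the unchanged context makes visible: test() still declares throws UnsupportedEncodingException although, after this commit, nothing in the body can throw it. A hypothetical follow-up (not part of this commit) would trim the signature and the now-unused import:

import javax.crypto.SecretKey;

import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.mockito.Mockito;

public class TestAuthenticationKeySketch {
  @Test
  public void test() { // throws clause no longer needed
    SecretKey secret = Mockito.mock(SecretKey.class);
    Mockito.when(secret.getEncoded()).thenReturn(Bytes.toBytes("secret"));
    // ... remaining assertions exactly as in the real test
  }
}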