HBASE-27234 Clean up error-prone warnings in hbase-examples

Close #4647

Co-authored-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Viraj Jasani <vjasani@apache.org>
Authored by Andrew Purtell on 2022-08-21 00:13:27 +08:00; committed by Duo Zhang
parent 1004876bad
commit 35fb37cf86
14 changed files with 79 additions and 73 deletions

File: HttpProxyExample.java

@@ -22,6 +22,8 @@ import static org.apache.hadoop.hbase.util.NettyFutureUtils.safeWriteAndFlush;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,6 +37,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
import org.apache.hbase.thirdparty.io.netty.bootstrap.ServerBootstrap;
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
@@ -158,12 +161,20 @@ public class HttpProxyExample {
}
private Params parse(FullHttpRequest req) {
String[] components = new QueryStringDecoder(req.uri()).path().split("/");
Preconditions.checkArgument(components.length == 4, "Unrecognized uri: %s", req.uri());
List<String> components =
Splitter.on('/').splitToList(new QueryStringDecoder(req.uri()).path());
Preconditions.checkArgument(components.size() == 4, "Unrecognized uri: %s", req.uri());
Iterator<String> i = components.iterator();
// the path starts with '/', so the first component of the split is empty
String[] cfAndCq = components[3].split(":");
Preconditions.checkArgument(cfAndCq.length == 2, "Unrecognized uri: %s", req.uri());
return new Params(components[1], components[2], cfAndCq[0], cfAndCq[1]);
i.next();
String table = i.next();
String row = i.next();
List<String> cfAndCq = Splitter.on(':').splitToList(i.next());
Preconditions.checkArgument(cfAndCq.size() == 2, "Unrecognized uri: %s", req.uri());
i = cfAndCq.iterator();
String family = i.next();
String qualifier = i.next();
return new Params(table, row, family, qualifier);
}
private void get(ChannelHandlerContext ctx, FullHttpRequest req) {
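
The hunk above is the fix error-prone's StringSplitter check suggests: String.split takes a regex and silently drops trailing empty strings, while Guava's Splitter makes the semantics explicit. The same swap reappears in SampleUploader further down. A minimal sketch of the behavioral difference, with a hypothetical class name and the hbase-thirdparty Guava relocation used above:

    import java.util.List;
    import org.apache.hbase.thirdparty.com.google.common.base.Splitter;

    public class SplitterSketch {
      public static void main(String[] args) {
        // The path starts with '/', so splitting yields a leading empty
        // component and the four-element check in parse() holds.
        List<String> parts = Splitter.on('/').splitToList("/table/row/cf:cq");
        System.out.println(parts.size()); // 4
        // String.split drops trailing empty strings, which is exactly the
        // surprise the StringSplitter check warns about:
        System.out.println("a,b,,".split(",").length); // 2, not 4
      }
    }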

File: MultiThreadedClientExample.java

@@ -30,7 +30,6 @@ import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.TableName;
@@ -240,7 +239,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
Put p = new Put(rk);
p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(rk).setFamily(FAMILY)
.setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Type.Put).setValue(value)
.setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Cell.Type.Put).setValue(value)
.build());
t.put(p);
}

File: BulkDeleteEndpoint.java

@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Scan;
@@ -52,7 +51,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.Service;
import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType;
import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder;
import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -157,7 +155,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
}
OperationStatus[] opStatus = region.batchMutate(deleteArr);
for (i = 0; i < opStatus.length; i++) {
if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
if (opStatus[i].getOperationStatusCode() != HConstants.OperationStatusCode.SUCCESS) {
break;
}
totalRowsDeleted++;
@@ -183,7 +181,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
}
}
}
Builder responseBuilder = BulkDeleteResponse.newBuilder();
BulkDeleteResponse.Builder responseBuilder = BulkDeleteResponse.newBuilder();
responseBuilder.setRowsDeleted(totalRowsDeleted);
if (deleteType == DeleteType.VERSION) {
responseBuilder.setVersionsDeleted(totalVersionsDeleted);
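
This file, like the Cell.Type and HConstants.OperationStatusCode changes above, addresses error-prone's BadImport warning: importing a nested type such as Builder or Type leaves a bare name that reads ambiguously, so the nested-class import is dropped and the name qualified through its enclosing class. A tiny sketch, class name hypothetical:

    import org.apache.hadoop.hbase.Cell;

    public class NestedTypeSketch {
      public static void main(String[] args) {
        // Qualified via the outer class, not via "import ...hbase.Cell.Type":
        Cell.Type type = Cell.Type.Put;
        System.out.println(type); // Put
      }
    }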

File: ExampleRegionObserverWithMetrics.java

@@ -109,6 +109,8 @@ public class ExampleRegionObserverWithMetrics implements RegionCoprocessor {
// simulate the operation by sleeping.
Thread.sleep(ThreadLocalRandom.current().nextLong(100));
} catch (InterruptedException ignore) {
// Restore the interrupt status
Thread.currentThread().interrupt();
}
}
}
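
The added interrupt() call is the standard idiom for a catch block that cannot rethrow: catching InterruptedException clears the thread's interrupt flag, so the handler re-asserts it for callers higher up the stack. A small self-contained sketch, class name hypothetical:

    public class InterruptSketch {
      static void simulateWork() {
        try {
          Thread.sleep(100);
        } catch (InterruptedException ignore) {
          // Restore the interrupt status so callers can still observe it.
          Thread.currentThread().interrupt();
        }
      }

      public static void main(String[] args) {
        Thread.currentThread().interrupt(); // pre-interrupt this thread
        simulateWork(); // sleep() throws immediately; the flag is restored
        System.out.println(Thread.currentThread().isInterrupted()); // true
      }
    }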

File: RowCountEndpoint.java

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -34,6 +35,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
@@ -97,10 +99,7 @@ public class RowCountEndpoint extends RowCountService implements RegionCoprocessor {
CoprocessorRpcUtils.setControllerException(controller, ioe);
} finally {
if (scanner != null) {
try {
scanner.close();
} catch (IOException ignored) {
}
IOUtils.closeQuietly(scanner);
}
}
done.run(response);
@@ -121,21 +120,15 @@ public class RowCountEndpoint extends RowCountService implements RegionCoprocessor {
long count = 0;
do {
hasMore = scanner.next(results);
for (Cell kv : results) {
count++;
}
count += Iterables.size(results);
results.clear();
} while (hasMore);
response = CountResponse.newBuilder().setCount(count).build();
} catch (IOException ioe) {
CoprocessorRpcUtils.setControllerException(controller, ioe);
} finally {
if (scanner != null) {
try {
scanner.close();
} catch (IOException ignored) {
}
IOUtils.closeQuietly(scanner);
}
}
done.run(response);
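
Two simplifications in this file: commons-io's IOUtils.closeQuietly replaces the try/close/catch-and-ignore boilerplate (the surrounding null check remains), and Guava's Iterables.size replaces a loop whose variable was never used. A minimal sketch under the same dependencies, class name hypothetical:

    import java.io.StringReader;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.io.IOUtils;
    import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;

    public class CleanupSketch {
      public static void main(String[] args) {
        // Null-safe close that swallows any exception thrown by close():
        StringReader scanner = new StringReader("example");
        IOUtils.closeQuietly(scanner);
        // Counting without a hand-written loop:
        List<String> results = Arrays.asList("a", "b", "c");
        System.out.println(Iterables.size(results)); // 3
      }
    }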

File: IndexBuilder.java

@@ -131,6 +131,7 @@ public class IndexBuilder extends Configured implements Tool {
return job;
}
@Override
public int run(String[] args) throws Exception {
Configuration conf = HBaseConfiguration.create(getConf());
if (args.length < 3) {

File: SampleUploader.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
/**
* Sample Uploader MapReduce
* <p>
@@ -80,16 +84,16 @@ public class SampleUploader extends Configured implements Tool {
// Each line is comma-delimited; row,family,qualifier,value
// Split CSV line
String[] values = line.toString().split(",");
if (values.length != 4) {
List<String> values = Splitter.on(',').splitToList(line.toString());
if (values.size() != 4) {
return;
}
Iterator<String> i = values.iterator();
// Extract each value
byte[] row = Bytes.toBytes(values[0]);
byte[] family = Bytes.toBytes(values[1]);
byte[] qualifier = Bytes.toBytes(values[2]);
byte[] value = Bytes.toBytes(values[3]);
byte[] row = Bytes.toBytes(i.next());
byte[] family = Bytes.toBytes(i.next());
byte[] qualifier = Bytes.toBytes(i.next());
byte[] value = Bytes.toBytes(i.next());
// Create Put
Put put = new Put(row);
@@ -136,6 +140,7 @@ public class SampleUploader extends Configured implements Tool {
* @param otherArgs The command line parameters after ToolRunner handles the standard arguments.
* @throws Exception When running the job fails.
*/
@Override
public int run(String[] otherArgs) throws Exception {
if (otherArgs.length != 2) {
System.err.println("Wrong number of arguments: " + otherArgs.length);

File: ShadeSaslServerAuthenticationProvider.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.provider.example;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
@@ -86,8 +87,8 @@ public class ShadeSaslServerAuthenticationProvider extends ShadeSaslAuthenticationProvider
}
Map<String, char[]> passwordDb = new HashMap<>();
try (FSDataInputStream fdis = fs.open(passwordFile);
BufferedReader reader = new BufferedReader(new InputStreamReader(fdis))) {
try (FSDataInputStream fdis = fs.open(passwordFile); BufferedReader reader =
new BufferedReader(new InputStreamReader(fdis, StandardCharsets.UTF_8))) {
String line = null;
int offset = 0;
while ((line = reader.readLine()) != null) {
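
Supplying StandardCharsets.UTF_8 resolves error-prone's DefaultCharset warning: an InputStreamReader constructed without a charset decodes with the platform default, which varies between machines. TestShadeSaslAuthenticationProvider at the bottom of this commit applies the matching fix on the writer side. A minimal sketch, class name hypothetical:

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class CharsetSketch {
      public static void main(String[] args) throws IOException {
        byte[] data = "user:password\n".getBytes(StandardCharsets.UTF_8);
        // Explicit charset: the bytes decode the same way everywhere.
        try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8))) {
          System.out.println(reader.readLine()); // user:password
        }
      }
    }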

File: DemoClient.java (thrift)

@@ -41,15 +41,12 @@ import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* See the instructions under hbase-examples/README.txt
*/
@InterfaceAudience.Private
public class DemoClient {
private static final Logger LOG = LoggerFactory.getLogger(DemoClient.class);
static protected int port;
static protected String host;
@@ -128,15 +125,15 @@ public class DemoClient {
System.out.println("scanning tables...");
for (ByteBuffer name : client.getTableNames()) {
System.out.println(" found: " + ClientUtils.utf8(name.array()));
System.out.println(" found: " + ClientUtils.utf8(name));
if (name.equals(demoTable) || name.equals(disabledTable)) {
if (client.isTableEnabled(name)) {
System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
System.out.println(" disabling table: " + ClientUtils.utf8(name));
client.disableTable(name);
}
System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
System.out.println(" deleting table: " + ClientUtils.utf8(name));
client.deleteTable(name);
}
}
@@ -324,7 +321,7 @@ public class DemoClient {
columnNames.clear();
for (ColumnDescriptor col2 : client.getColumnDescriptors(demoTable).values()) {
System.out.println("column with name: " + new String(col2.name.array()));
System.out.println("column with name: " + ClientUtils.utf8(col2.name));
System.out.println(col2.toString());
columnNames.add(col2.name);
@@ -356,7 +353,7 @@ public class DemoClient {
rowStr.append("; ");
}
System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
System.out.println("row: " + ClientUtils.utf8(row) + ", values: " + rowStr);
}
private void printRow(TRowResult rowResult) {

File: HttpDoAsClient.java

@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.security.auth.Subject;
@@ -36,8 +35,6 @@ import javax.security.auth.login.LoginContext;
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClientUtils;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -129,13 +126,13 @@ public class HttpDoAsClient {
//
System.out.println("scanning tables...");
for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
System.out.println(" found: " + ClientUtils.utf8(name.array()));
if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
System.out.println(" found: " + ClientUtils.utf8(name));
if (ClientUtils.utf8(name).equals(ClientUtils.utf8(t))) {
if (refresh(client, httpClient).isTableEnabled(name)) {
System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
System.out.println(" disabling table: " + ClientUtils.utf8(name));
refresh(client, httpClient).disableTable(name);
}
System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
System.out.println(" deleting table: " + ClientUtils.utf8(name));
refresh(client, httpClient).deleteTable(name);
}
}
@@ -167,8 +164,8 @@ public class HttpDoAsClient {
Map<ByteBuffer, ColumnDescriptor> columnMap =
refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t));
for (ColumnDescriptor col2 : columnMap.values()) {
System.out.println(
" column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
System.out
.println(" column: " + ClientUtils.utf8(col2.name) + ", maxVer: " + col2.maxVersions);
}
transport.close();
@@ -205,26 +202,13 @@ public class HttpDoAsClient {
context.requestInteg(true);
final byte[] outToken = context.initSecContext(new byte[0], 0, 0);
StringBuffer outputBuffer = new StringBuffer();
StringBuilder outputBuffer = new StringBuilder();
outputBuffer.append("Negotiate ");
outputBuffer.append(Bytes.toString(Base64.getEncoder().encode(outToken)));
System.out.print("Ticket is: " + outputBuffer);
return outputBuffer.toString();
}
private void printVersions(ByteBuffer row, List<TCell> versions) {
StringBuilder rowStr = new StringBuilder();
for (TCell cell : versions) {
rowStr.append(ClientUtils.utf8(cell.value.array()));
rowStr.append("; ");
}
System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
}
private void printRow(TRowResult rowResult) {
ClientUtils.printRow(rowResult);
}
static Subject getSubject() throws Exception {
if (!secure) {
return new Subject();
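
The StringBuffer-to-StringBuilder change above is the usual response to error-prone's JdkObsolete warning: for a method-local accumulator that never escapes its thread, StringBuilder offers the same API without StringBuffer's per-call synchronization. A trivial sketch, class name and token value hypothetical:

    public class BuilderSketch {
      public static void main(String[] args) {
        StringBuilder outputBuffer = new StringBuilder();
        outputBuffer.append("Negotiate ");
        outputBuffer.append("dG9rZW4="); // a stand-in base64 token
        System.out.println(outputBuffer); // Negotiate dG9rZW4=
      }
    }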

File: DemoClient.java (thrift2)

@@ -140,11 +140,11 @@ public class DemoClient {
TResult result = client.get(table, get);
System.out.print("row = " + new String(result.getRow()));
System.out.print("row = " + ClientUtils.utf8(result.getRow()));
for (TColumnValue resultColumnValue : result.getColumnValues()) {
System.out.print("family = " + new String(resultColumnValue.getFamily()));
System.out.print("qualifier = " + new String(resultColumnValue.getFamily()));
System.out.print("value = " + new String(resultColumnValue.getValue()));
System.out.print("family = " + ClientUtils.utf8(resultColumnValue.getFamily()));
System.out.print("qualifier = " + ClientUtils.utf8(resultColumnValue.getFamily()));
System.out.print("value = " + ClientUtils.utf8(resultColumnValue.getValue()));
System.out.print("timestamp = " + resultColumnValue.getTimestamp());
}

File: ClientUtils.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
@@ -87,7 +86,7 @@ public final class ClientUtils {
}
StringBuilder rowStr = new StringBuilder();
for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
for (Map.Entry<String, TCell> entry : sorted.entrySet()) {
rowStr.append(entry.getKey());
rowStr.append(" => ");
rowStr.append(utf8(entry.getValue().value.array()));
@@ -98,8 +97,8 @@ public final class ClientUtils {
}
/**
* Helper to translate byte[]'s to UTF8 strings
* @param buf byte array buffer
* Helper to translate byte[]s to UTF8 strings
* @param buf byte array
* @return UTF8 decoded string value
*/
public static String utf8(final byte[] buf) {
@@ -110,4 +109,17 @@ public final class ClientUtils {
}
}
/**
* Helper to translate a byte buffer to UTF8 strings
* @param bb byte buffer
* @return UTF8 decoded string value
*/
public static String utf8(final ByteBuffer bb) {
// performance is not very critical here so we always copy the BB to a byte array
byte[] buf = new byte[bb.remaining()];
// duplicate so the get will not change the position of the original bb
bb.duplicate().get(buf);
return utf8(buf);
}
}
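
The new utf8(ByteBuffer) overload exists because ByteBuffer.array() returns the whole backing array and ignores the buffer's position and limit, which is why the Thrift demo clients above now pass the buffer itself instead of calling array(). A sketch of the pitfall, class name hypothetical:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class ByteBufferSketch {
      public static void main(String[] args) {
        byte[] backing = "XXtableXX".getBytes(StandardCharsets.UTF_8);
        ByteBuffer bb = ByteBuffer.wrap(backing, 2, 5); // window over "table"
        // array() exposes everything, including bytes outside the window:
        System.out.println(new String(bb.array(), StandardCharsets.UTF_8)); // XXtableXX
        // Copying position..limit through a duplicate reads only the window:
        byte[] buf = new byte[bb.remaining()];
        bb.duplicate().get(buf);
        System.out.println(new String(buf, StandardCharsets.UTF_8)); // table
      }
    }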

File: WriteHeavyIncrementObserverTestBase.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.coprocessor.example;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.UncheckedIOException;
import java.util.stream.IntStream;
import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -78,6 +79,7 @@ public class WriteHeavyIncrementObserverTestBase {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
throw (IOException) new InterruptedIOException().initCause(e);
}
}
}
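
Instead of swallowing the InterruptedException, the added line converts it to an InterruptedIOException, an IOException subclass, keeping the original exception as the cause so the method's IOException-only signature still holds. A minimal sketch, class name hypothetical:

    import java.io.IOException;
    import java.io.InterruptedIOException;

    public class InterruptedIOSketch {
      static void waitABit() throws IOException {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          // initCause returns Throwable; the cast is safe because the object
          // really is an InterruptedIOException (hence an IOException).
          throw (IOException) new InterruptedIOException().initCause(e);
        }
      }

      public static void main(String[] args) throws IOException {
        waitABit();
        System.out.println("done");
      }
    }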

File: TestShadeSaslAuthenticationProvider.java

@@ -29,6 +29,7 @@ import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
@@ -128,8 +129,8 @@ public class TestShadeSaslAuthenticationProvider {
if (fs.exists(p)) {
fs.delete(p, true);
}
try (FSDataOutputStream out = fs.create(p);
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
for (Entry<String, char[]> e : userDatabase.entrySet()) {
writer.write(e.getKey());
writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);