HBASE-27234 Clean up error-prone warnings in hbase-examples
Close #4647

Co-authored-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Viraj Jasani <vjasani@apache.org>
commit 35fb37cf86
parent 1004876bad
@@ -22,6 +22,8 @@ import static org.apache.hadoop.hbase.util.NettyFutureUtils.safeWriteAndFlush;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.util.Iterator;
+import java.util.List;
 import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,6 +37,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.io.netty.bootstrap.ServerBootstrap;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
@@ -158,12 +161,20 @@ public class HttpProxyExample {
     }
 
     private Params parse(FullHttpRequest req) {
-      String[] components = new QueryStringDecoder(req.uri()).path().split("/");
-      Preconditions.checkArgument(components.length == 4, "Unrecognized uri: %s", req.uri());
+      List<String> components =
+        Splitter.on('/').splitToList(new QueryStringDecoder(req.uri()).path());
+      Preconditions.checkArgument(components.size() == 4, "Unrecognized uri: %s", req.uri());
+      Iterator<String> i = components.iterator();
       // path is start with '/' so split will give an empty component
-      String[] cfAndCq = components[3].split(":");
-      Preconditions.checkArgument(cfAndCq.length == 2, "Unrecognized uri: %s", req.uri());
-      return new Params(components[1], components[2], cfAndCq[0], cfAndCq[1]);
+      i.next();
+      String table = i.next();
+      String row = i.next();
+      List<String> cfAndCq = Splitter.on(':').splitToList(i.next());
+      Preconditions.checkArgument(cfAndCq.size() == 2, "Unrecognized uri: %s", req.uri());
+      i = cfAndCq.iterator();
+      String family = i.next();
+      String qualifier = i.next();
+      return new Params(table, row, family, qualifier);
     }
 
     private void get(ChannelHandlerContext ctx, FullHttpRequest req) {
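Note on the hunk above: the patch swaps `String.split` for the shaded Guava `Splitter` and walks the result with an `Iterator`. A minimal standalone sketch of the same parsing flow; the class name and the `/t1/r1/cf:cq` sample path are invented for illustration and are not part of the patch:

```java
import java.util.Iterator;
import java.util.List;

import org.apache.hbase.thirdparty.com.google.common.base.Splitter;

public class SplitterParseSketch {
  public static void main(String[] args) {
    // "/t1/r1/cf:cq" splits into ["", "t1", "r1", "cf:cq"]: the leading '/'
    // yields an empty first element, just as String.split("/") did.
    List<String> components = Splitter.on('/').splitToList("/t1/r1/cf:cq");
    Iterator<String> i = components.iterator();
    i.next(); // skip the empty component before the first '/'
    String table = i.next();
    String row = i.next();
    List<String> cfAndCq = Splitter.on(':').splitToList(i.next());
    System.out.println(table + " " + row + " " + cfAndCq.get(0) + ":" + cfAndCq.get(1));
  }
}
```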
@@ -30,7 +30,6 @@ import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
 import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.TableName;
@@ -240,7 +239,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
         byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
         Put p = new Put(rk);
         p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(rk).setFamily(FAMILY)
-          .setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Type.Put).setValue(value)
+          .setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Cell.Type.Put).setValue(value)
           .build());
         t.put(p);
       }
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Scan;
@@ -52,7 +51,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.Service;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
-import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 
@@ -157,7 +155,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
           }
           OperationStatus[] opStatus = region.batchMutate(deleteArr);
           for (i = 0; i < opStatus.length; i++) {
-            if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
+            if (opStatus[i].getOperationStatusCode() != HConstants.OperationStatusCode.SUCCESS) {
               break;
             }
             totalRowsDeleted++;
@@ -183,7 +181,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
           }
         }
       }
-      Builder responseBuilder = BulkDeleteResponse.newBuilder();
+      BulkDeleteResponse.Builder responseBuilder = BulkDeleteResponse.newBuilder();
       responseBuilder.setRowsDeleted(totalRowsDeleted);
       if (deleteType == DeleteType.VERSION) {
         responseBuilder.setVersionsDeleted(totalVersionsDeleted);
@@ -109,6 +109,8 @@ public class ExampleRegionObserverWithMetrics implements RegionCoprocessor {
         // simulate the operation by sleeping.
         Thread.sleep(ThreadLocalRandom.current().nextLong(100));
       } catch (InterruptedException ignore) {
+        // Restore the interrupt status
+        Thread.currentThread().interrupt();
       }
     }
   }
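The two added lines follow the usual remedy for a swallowed `InterruptedException`: re-assert the thread's interrupt flag so callers can still observe it. A small self-contained sketch of that pattern, with a hypothetical class and method name:

```java
public class RestoreInterruptSketch {
  // Sleep without losing the caller's interrupt signal: if the sleep is interrupted,
  // the flag is re-set so later isInterrupted() checks still see it.
  static void sleepQuietly(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException ignore) {
      // Restore the interrupt status
      Thread.currentThread().interrupt();
    }
  }

  public static void main(String[] args) {
    Thread.currentThread().interrupt();
    sleepQuietly(10);
    System.out.println("interrupt flag still set: " + Thread.currentThread().isInterrupted());
  }
}
```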
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -34,6 +35,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.Service;
@@ -97,10 +99,7 @@ public class RowCountEndpoint extends RowCountService implements RegionCoprocess
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     } finally {
       if (scanner != null) {
-        try {
-          scanner.close();
-        } catch (IOException ignored) {
-        }
+        IOUtils.closeQuietly(scanner);
       }
     }
     done.run(response);
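`IOUtils.closeQuietly` (commons-io) replaces the hand-rolled try/catch around `scanner.close()`; it swallows the `IOException` itself. A toy sketch of the equivalence, using a throwaway `Closeable` instead of an `InternalScanner`:

```java
import java.io.Closeable;
import java.io.IOException;

import org.apache.commons.io.IOUtils;

public class CloseQuietlySketch {
  public static void main(String[] args) {
    Closeable noisy = () -> {
      throw new IOException("close failed");
    };
    // Before: try { noisy.close(); } catch (IOException ignored) { }
    // After: one call that swallows the IOException internally.
    IOUtils.closeQuietly(noisy);
    System.out.println("close failure was ignored");
  }
}
```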
@@ -121,21 +120,15 @@ public class RowCountEndpoint extends RowCountService implements RegionCoprocess
       long count = 0;
       do {
         hasMore = scanner.next(results);
-        for (Cell kv : results) {
-          count++;
-        }
+        count += Iterables.size(results);
         results.clear();
       } while (hasMore);
-
       response = CountResponse.newBuilder().setCount(count).build();
     } catch (IOException ioe) {
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     } finally {
       if (scanner != null) {
-        try {
-          scanner.close();
-        } catch (IOException ignored) {
-        }
+        IOUtils.closeQuietly(scanner);
       }
     }
     done.run(response);
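`Iterables.size` (shaded Guava) replaces the manual counting loop. A tiny sketch of the call; the sample data is invented:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;

public class IterablesSizeSketch {
  public static void main(String[] args) {
    List<String> results = Arrays.asList("a", "b", "c");
    long count = 0;
    // For a Collection this just delegates to size(); for other Iterables it walks the elements.
    count += Iterables.size(results);
    System.out.println(count); // prints 3
  }
}
```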
@@ -131,6 +131,7 @@ public class IndexBuilder extends Configured implements Tool {
     return job;
   }
 
+  @Override
   public int run(String[] args) throws Exception {
     Configuration conf = HBaseConfiguration.create(getConf());
     if (args.length < 3) {
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+
 /**
  * Sample Uploader MapReduce
  * <p>
@@ -80,16 +84,16 @@ public class SampleUploader extends Configured implements Tool {
       // Each line is comma-delimited; row,family,qualifier,value
 
       // Split CSV line
-      String[] values = line.toString().split(",");
-      if (values.length != 4) {
+      List<String> values = Splitter.on(',').splitToList(line.toString());
+      if (values.size() != 4) {
         return;
       }
-
+      Iterator<String> i = values.iterator();
       // Extract each value
-      byte[] row = Bytes.toBytes(values[0]);
-      byte[] family = Bytes.toBytes(values[1]);
-      byte[] qualifier = Bytes.toBytes(values[2]);
-      byte[] value = Bytes.toBytes(values[3]);
+      byte[] row = Bytes.toBytes(i.next());
+      byte[] family = Bytes.toBytes(i.next());
+      byte[] qualifier = Bytes.toBytes(i.next());
+      byte[] value = Bytes.toBytes(i.next());
 
       // Create Put
       Put put = new Put(row);
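Same `Splitter` cleanup as in HttpProxyExample. One behavioral side note rather than something this hunk relies on, since the `size() != 4` guard still applies: `String.split` drops trailing empty strings while `Splitter.splitToList` keeps them. A small sketch with an invented sample line:

```java
import java.util.List;

import org.apache.hbase.thirdparty.com.google.common.base.Splitter;

public class SplitTrailingEmptySketch {
  public static void main(String[] args) {
    String line = "row1,f,q,";
    // String.split drops trailing empty strings: 3 tokens here.
    System.out.println("String.split -> " + line.split(",").length);
    // Splitter keeps them: 4 tokens, so the size() == 4 check still sees the empty value.
    List<String> values = Splitter.on(',').splitToList(line);
    System.out.println("Splitter     -> " + values.size());
  }
}
```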
@@ -136,6 +140,7 @@ public class SampleUploader extends Configured implements Tool {
    * @param otherArgs The command line parameters after ToolRunner handles standard.
    * @throws Exception When running the job fails.
    */
+  @Override
   public int run(String[] otherArgs) throws Exception {
     if (otherArgs.length != 2) {
       System.err.println("Wrong number of arguments: " + otherArgs.length);
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.provider.example;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -86,8 +87,8 @@ public class ShadeSaslServerAuthenticationProvider extends ShadeSaslAuthenticati
     }
 
     Map<String, char[]> passwordDb = new HashMap<>();
-    try (FSDataInputStream fdis = fs.open(passwordFile);
-      BufferedReader reader = new BufferedReader(new InputStreamReader(fdis))) {
+    try (FSDataInputStream fdis = fs.open(passwordFile); BufferedReader reader =
+      new BufferedReader(new InputStreamReader(fdis, StandardCharsets.UTF_8))) {
       String line = null;
       int offset = 0;
       while ((line = reader.readLine()) != null) {
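`new InputStreamReader(in)` decodes with the platform default charset; pinning it to `StandardCharsets.UTF_8` makes parsing the password file independent of the JVM's `file.encoding`. A self-contained sketch of the pattern, using in-memory data instead of an HDFS stream:

```java
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ExplicitCharsetReaderSketch {
  public static void main(String[] args) throws IOException {
    byte[] data = "user1:secret\n".getBytes(StandardCharsets.UTF_8);
    // Decoding no longer depends on the JVM's default charset (file.encoding).
    try (BufferedReader reader = new BufferedReader(
      new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8))) {
      System.out.println(reader.readLine());
    }
  }
}
```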
@@ -41,15 +41,12 @@ import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * See the instructions under hbase-examples/README.txt
  */
 @InterfaceAudience.Private
 public class DemoClient {
-  private static final Logger LOG = LoggerFactory.getLogger(DemoClient.class);
 
   static protected int port;
   static protected String host;
@@ -128,15 +125,15 @@ public class DemoClient {
     System.out.println("scanning tables...");
 
     for (ByteBuffer name : client.getTableNames()) {
-      System.out.println(" found: " + ClientUtils.utf8(name.array()));
+      System.out.println(" found: " + ClientUtils.utf8(name));
 
       if (name.equals(demoTable) || name.equals(disabledTable)) {
         if (client.isTableEnabled(name)) {
-          System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
+          System.out.println(" disabling table: " + ClientUtils.utf8(name));
           client.disableTable(name);
         }
 
-        System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
+        System.out.println(" deleting table: " + ClientUtils.utf8(name));
         client.deleteTable(name);
       }
     }
@@ -324,7 +321,7 @@ public class DemoClient {
     columnNames.clear();
 
     for (ColumnDescriptor col2 : client.getColumnDescriptors(demoTable).values()) {
-      System.out.println("column with name: " + new String(col2.name.array()));
+      System.out.println("column with name: " + ClientUtils.utf8(col2.name));
       System.out.println(col2.toString());
 
       columnNames.add(col2.name);
@@ -356,7 +353,7 @@ public class DemoClient {
       rowStr.append("; ");
     }
 
-    System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
+    System.out.println("row: " + ClientUtils.utf8(row) + ", values: " + rowStr);
   }
 
   private void printRow(TRowResult rowResult) {
@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import javax.security.auth.Subject;
@@ -36,8 +35,6 @@ import javax.security.auth.login.LoginContext;
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TRowResult;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.thrift.protocol.TBinaryProtocol;
@@ -129,13 +126,13 @@ public class HttpDoAsClient {
     //
     System.out.println("scanning tables...");
     for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
-      System.out.println(" found: " + ClientUtils.utf8(name.array()));
-      if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
+      System.out.println(" found: " + ClientUtils.utf8(name));
+      if (ClientUtils.utf8(name).equals(ClientUtils.utf8(t))) {
         if (refresh(client, httpClient).isTableEnabled(name)) {
-          System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
+          System.out.println(" disabling table: " + ClientUtils.utf8(name));
           refresh(client, httpClient).disableTable(name);
         }
-        System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
+        System.out.println(" deleting table: " + ClientUtils.utf8(name));
         refresh(client, httpClient).deleteTable(name);
       }
     }
@@ -167,8 +164,8 @@ public class HttpDoAsClient {
     Map<ByteBuffer, ColumnDescriptor> columnMap =
       refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println(
-        " column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
+      System.out
+        .println(" column: " + ClientUtils.utf8(col2.name) + ", maxVer: " + col2.maxVersions);
     }
 
     transport.close();
@@ -205,26 +202,13 @@ public class HttpDoAsClient {
     context.requestInteg(true);
 
     final byte[] outToken = context.initSecContext(new byte[0], 0, 0);
-    StringBuffer outputBuffer = new StringBuffer();
+    StringBuilder outputBuffer = new StringBuilder();
     outputBuffer.append("Negotiate ");
     outputBuffer.append(Bytes.toString(Base64.getEncoder().encode(outToken)));
     System.out.print("Ticket is: " + outputBuffer);
     return outputBuffer.toString();
   }
 
-  private void printVersions(ByteBuffer row, List<TCell> versions) {
-    StringBuilder rowStr = new StringBuilder();
-    for (TCell cell : versions) {
-      rowStr.append(ClientUtils.utf8(cell.value.array()));
-      rowStr.append("; ");
-    }
-    System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
-  }
-
-  private void printRow(TRowResult rowResult) {
-    ClientUtils.printRow(rowResult);
-  }
-
   static Subject getSubject() throws Exception {
     if (!secure) {
       return new Subject();
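Besides deleting the unused `printVersions`/`printRow` helpers, this hunk swaps `StringBuffer` for `StringBuilder`: the buffer is method-local, so `StringBuffer`'s per-call synchronization buys nothing. A trivial sketch; the Base64 token literal is invented:

```java
public class StringBuilderSketch {
  public static void main(String[] args) {
    // Method-local, single-threaded use: StringBuilder avoids StringBuffer's synchronization.
    StringBuilder outputBuffer = new StringBuilder();
    outputBuffer.append("Negotiate ");
    outputBuffer.append("dG9rZW4="); // stand-in for the Base64-encoded GSS token
    System.out.println(outputBuffer);
  }
}
```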
@@ -140,11 +140,11 @@ public class DemoClient {
 
       TResult result = client.get(table, get);
 
-      System.out.print("row = " + new String(result.getRow()));
+      System.out.print("row = " + ClientUtils.utf8(result.getRow()));
       for (TColumnValue resultColumnValue : result.getColumnValues()) {
-        System.out.print("family = " + new String(resultColumnValue.getFamily()));
-        System.out.print("qualifier = " + new String(resultColumnValue.getFamily()));
-        System.out.print("value = " + new String(resultColumnValue.getValue()));
+        System.out.print("family = " + ClientUtils.utf8(resultColumnValue.getFamily()));
+        System.out.print("qualifier = " + ClientUtils.utf8(resultColumnValue.getFamily()));
+        System.out.print("value = " + ClientUtils.utf8(resultColumnValue.getValue()));
         System.out.print("timestamp = " + resultColumnValue.getTimestamp());
       }
 
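`new String(byte[])` decodes with the platform default charset, which is why these prints now go through `ClientUtils.utf8`. The helper's body is not shown in this hunk, so the sketch below only illustrates the underlying idea of an explicit UTF-8 decode; the sample bytes are invented:

```java
import java.nio.charset.StandardCharsets;

public class ExplicitDecodeSketch {
  public static void main(String[] args) {
    byte[] value = { (byte) 0xC3, (byte) 0xA9 }; // UTF-8 encoding of 'é'
    // new String(value) would depend on the platform charset; this always decodes as UTF-8.
    System.out.println(new String(value, StandardCharsets.UTF_8));
  }
}
```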
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util;
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.SortedMap;
 import java.util.TreeMap;
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -87,7 +86,7 @@ public final class ClientUtils {
     }
 
     StringBuilder rowStr = new StringBuilder();
-    for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
+    for (Map.Entry<String, TCell> entry : sorted.entrySet()) {
       rowStr.append(entry.getKey());
       rowStr.append(" => ");
       rowStr.append(utf8(entry.getValue().value.array()));
@@ -98,8 +97,8 @@ public final class ClientUtils {
   }
 
   /**
-   * Helper to translate byte[]'s to UTF8 strings
-   * @param buf byte array buffer
+   * Helper to translate byte[]s to UTF8 strings
+   * @param buf byte array
    * @return UTF8 decoded string value
    */
   public static String utf8(final byte[] buf) {
@@ -110,4 +109,17 @@ public final class ClientUtils {
     }
   }
 
+  /**
+   * Helper to translate a byte buffer to UTF8 strings
+   * @param bb byte buffer
+   * @return UTF8 decoded string value
+   */
+  public static String utf8(final ByteBuffer bb) {
+    // performance is not very critical here so we always copy the BB to a byte array
+    byte[] buf = new byte[bb.remaining()];
+    // duplicate so the get will not change the position of the original bb
+    bb.duplicate().get(buf);
+    return utf8(buf);
+  }
+
 }
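The new `utf8(ByteBuffer)` overload avoids `ByteBuffer.array()`, which ignores the buffer's position/limit and fails on buffers without an accessible backing array. A standalone sketch mirroring the helper's copy-via-`duplicate()` approach; the class name and sample data are invented:

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ByteBufferUtf8Sketch {
  static String utf8(ByteBuffer bb) {
    byte[] buf = new byte[bb.remaining()];
    // duplicate() shares content but has its own position, so the caller's buffer is untouched
    bb.duplicate().get(buf);
    return new String(buf, StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    ByteBuffer bb = ByteBuffer.wrap("tablename".getBytes(StandardCharsets.UTF_8));
    bb.position(5); // pretend earlier code already consumed the first five bytes
    System.out.println(utf8(bb));      // prints "name"
    System.out.println(bb.position()); // still 5
  }
}
```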
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.io.InterruptedIOException;
 import java.io.UncheckedIOException;
 import java.util.stream.IntStream;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -78,6 +79,7 @@ public class WriteHeavyIncrementObserverTestBase {
       try {
         Thread.sleep(10);
       } catch (InterruptedException e) {
+        throw (IOException) new InterruptedIOException().initCause(e);
       }
     }
   }
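The added line converts the `InterruptedException` into an `InterruptedIOException` (an `IOException` subclass) instead of dropping it, which suits code that is only allowed to throw `IOException`. A self-contained sketch of the idiom, with hypothetical class and method names:

```java
import java.io.IOException;
import java.io.InterruptedIOException;

public class InterruptedToIOExceptionSketch {
  // In code that may only throw IOException, wrap the interruption instead of dropping it.
  static void pause(long millis) throws IOException {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
      throw (IOException) new InterruptedIOException().initCause(e);
    }
  }

  public static void main(String[] args) throws IOException {
    pause(10);
    System.out.println("slept without interruption");
  }
}
```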
@@ -29,6 +29,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -128,8 +129,8 @@ public class TestShadeSaslAuthenticationProvider {
     if (fs.exists(p)) {
       fs.delete(p, true);
     }
-    try (FSDataOutputStream out = fs.create(p);
-      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
+    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
+      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
       for (Entry<String, char[]> e : userDatabase.entrySet()) {
         writer.write(e.getKey());
         writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);