HBASE-22326 Fixed Checkstyle errors in hbase-examples
parent 21fe1d27d2
commit 65b8179c22
@@ -69,6 +69,13 @@
       <groupId>net.revelc.code</groupId>
       <artifactId>warbucks-maven-plugin</artifactId>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-checkstyle-plugin</artifactId>
+      <configuration>
+        <failOnViolation>true</failOnViolation>
+      </configuration>
+    </plugin>
   </plugins>
 </build>
 <dependencies>
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.client.example;
 
 import java.io.Closeable;
@@ -51,7 +50,7 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
   /**
    * Constructor with Conf object
    *
-   * @param cfg
+   * @param cfg the {@link Configuration} object to use
    */
   public RefreshHFilesClient(Configuration cfg) {
     try {
@@ -75,26 +74,28 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
   }
 
   public void refreshHFiles(final Table table) throws Throwable {
-    final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest
-      .getDefaultInstance();
-    table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW,
-      HConstants.EMPTY_END_ROW,
+    final RefreshHFilesProtos.RefreshHFilesRequest request =
+      RefreshHFilesProtos.RefreshHFilesRequest.getDefaultInstance();
+    table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class,
+      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
       new Batch.Call<RefreshHFilesProtos.RefreshHFilesService,
         RefreshHFilesProtos.RefreshHFilesResponse>() {
         @Override
         public RefreshHFilesProtos.RefreshHFilesResponse call(
             RefreshHFilesProtos.RefreshHFilesService refreshHFilesService)
             throws IOException {
           ServerRpcController controller = new ServerRpcController();
           BlockingRpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> rpcCallback =
             new BlockingRpcCallback<>();
           refreshHFilesService.refreshHFiles(controller, request, rpcCallback);
           if (controller.failedOnException()) {
             throw controller.getFailedOn();
           }
           return rpcCallback.get();
         }
       });
     LOG.debug("Done refreshing HFiles");
   }
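Note: the call reflowed above fans a single RefreshHFiles RPC out to every region of the table (EMPTY_START_ROW through EMPTY_END_ROW). A minimal caller sketch, assuming a reachable cluster and the hbase-examples jar on the classpath; the table name "t1" is made up for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.example.RefreshHFilesClient;

public class RefreshHFilesDemo {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    // RefreshHFilesClient implements Closeable, so try-with-resources works;
    // refreshHFiles(TableName) is the overload the tests in this commit call.
    try (RefreshHFilesClient client = new RefreshHFilesClient(conf)) {
      client.refreshHFiles(TableName.valueOf("t1"));
    }
  }
}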
@@ -87,8 +87,8 @@ import org.slf4j.LoggerFactory;
  * return rpcCallback.get();
  * }
  * };
- * Map<byte[], BulkDeleteResponse> result = ht.coprocessorService(BulkDeleteService.class, scan
- *   .getStartRow(), scan.getStopRow(), callable);
+ * Map<byte[], BulkDeleteResponse> result = ht.coprocessorService(BulkDeleteService.class,
+ *   scan.getStartRow(), scan.getStopRow(), callable);
  * for (BulkDeleteResponse response : result.values()) {
  *   noOfDeletedRows += response.getRowsDeleted();
  * }
@@ -225,7 +225,8 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
       int noOfVersionsToDelete = 0;
       if (timestamp == null) {
         for (Cell kv : deleteRow) {
-          delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
+          delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
+            kv.getTimestamp());
           noOfVersionsToDelete++;
         }
       } else {
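Note on the API in the wrapped line: Delete.addColumn(family, qualifier, ts) marks exactly one cell version, which is why the endpoint counts noOfVersionsToDelete per cell. A minimal sketch with hypothetical row, family, and qualifier names:

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteOneVersion {
  public static Delete build() {
    Delete d = new Delete(Bytes.toBytes("row1"));
    // With an explicit timestamp only that version is marked for deletion;
    // without it, addColumn would target just the most recent version.
    d.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), 42L);
    return d;
  }
}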
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
@@ -94,7 +93,8 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast
   }
 
   @Override
-  public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+  public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      TableName tableName) throws IOException {
     // Increment the Counter for disable table operations
     this.disableTableCounter.increment();
   }
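Note: for the hook above to fire, the observer has to be loaded on the master. A hedged sketch of one way to register it via configuration; CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY is the standard HBase config key, and the setStrings pattern mirrors the test setup elsewhere in this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.example.ExampleMasterObserverWithMetrics;

public class RegisterObserver {
  public static Configuration configure() {
    Configuration conf = HBaseConfiguration.create();
    // The master loads the classes listed under this key at startup.
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        ExampleMasterObserverWithMetrics.class.getName());
    return conf;
  }
}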
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import com.google.protobuf.RpcCallback;
@@ -57,8 +56,9 @@ public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesServ
   }
 
   @Override
-  public void refreshHFiles(RpcController controller, RefreshHFilesProtos.RefreshHFilesRequest request,
-    RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
+  public void refreshHFiles(RpcController controller,
+      RefreshHFilesProtos.RefreshHFilesRequest request,
+      RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
     try {
       for (Store store : env.getRegion().getStores()) {
         LOG.debug("Refreshing HFiles for region: " + store.getRegionInfo().getRegionNameAsString() +
@@ -52,7 +52,8 @@ import org.apache.yetus.audience.InterfaceAudience;
  * Modify ${HADOOP_HOME}/conf/hadoop-env.sh to include the hbase jar, the
  * zookeeper jar (can be found in lib/ directory under HBase root, the examples output directory,
  * and the hbase conf directory in HADOOP_CLASSPATH, and then run
- * <tt><strong>bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]</strong></tt>
+ * <tt><strong>bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder
+ * TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]</strong></tt>
  * </p>
  *
  * <p>
@@ -117,8 +118,7 @@ public class IndexBuilder extends Configured implements Tool {
   /**
    * Job configuration.
    */
-  public static Job configureJob(Configuration conf, String [] args)
-    throws IOException {
+  public static Job configureJob(Configuration conf, String [] args) throws IOException {
     String tableName = args[0];
     String columnFamily = args[1];
     System.out.println("****" + tableName);
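Note: since IndexBuilder implements Tool (see the hunk header above), configureJob is normally reached through ToolRunner rather than called directly. A minimal launcher sketch; the table, column family, and attribute names are placeholders, not values from this commit:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.IndexBuilder;
import org.apache.hadoop.util.ToolRunner;

public class RunIndexBuilder {
  public static void main(String[] args) throws Exception {
    // Same effect as the bin/hadoop invocation quoted in the class javadoc.
    int exit = ToolRunner.run(HBaseConfiguration.create(), new IndexBuilder(),
        new String[] { "myTable", "attributes", "attr1" });
    System.exit(exit);
  }
}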
@@ -66,16 +66,12 @@ public class SampleUploader extends Configured implements Tool {
 
   private static final String NAME = "SampleUploader";
 
-  static class Uploader
-    extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
-
+  static class Uploader extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
     private long checkpoint = 100;
     private long count = 0;
 
     @Override
-    public void map(LongWritable key, Text line, Context context)
-      throws IOException {
-
+    public void map(LongWritable key, Text line, Context context) throws IOException {
       // Input is a CSV file
       // Each map() is a single line, where the key is the line number
       // Each line is comma-delimited; row,family,qualifier,value
@@ -117,8 +113,7 @@ public class SampleUploader extends Configured implements Tool {
   /**
    * Job configuration.
    */
-  public static Job configureJob(Configuration conf, String [] args)
-    throws IOException {
+  public static Job configureJob(Configuration conf, String [] args) throws IOException {
     Path inputPath = new Path(args[0]);
     String tableName = args[1];
     Job job = new Job(conf, NAME + "_" + tableName);
@@ -192,11 +192,11 @@ public class DemoClient {
 
     // Test UTF-8 handling
     byte[] invalid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
       (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1};
     byte[] valid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
       (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
       (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
       (byte) 0x83, (byte) 0xAB};
 
     ArrayList<Mutation> mutations;
     // non-utf8 is fine for data
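Note for readers puzzling over these arrays: invalid is "foo-" followed by 0xFC, which opens a six-byte sequence that modern UTF-8 forbids, while valid decodes to "foo-生ビール". A small validity check, sketched with plain java.nio and nothing HBase-specific:

import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;

public class Utf8Check {
  // Returns true only for well-formed UTF-8; unlike new String(bytes, UTF_8),
  // an explicit CharsetDecoder throws instead of substituting U+FFFD.
  static boolean isValidUtf8(byte[] bytes) {
    try {
      StandardCharsets.UTF_8.newDecoder().decode(ByteBuffer.wrap(bytes));
      return true;
    } catch (CharacterCodingException e) {
      return false;
    }
  }
}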
@@ -421,9 +421,9 @@ public class DemoClient {
         options.put("debug", "true");
 
         return new AppConfigurationEntry[]{
           new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
             AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
             options)};
       }
     });
 
@@ -70,9 +70,7 @@ public class HttpDoAsClient {
   static protected String principal = null;
 
   public static void main(String[] args) throws Exception {
-
     if (args.length < 3 || args.length > 4) {
-
       System.out.println("Invalid arguments!");
       System.out.println("Usage: HttpDoAsClient host port doAsUserName [security=true]");
       System.exit(-1);
@@ -143,8 +141,6 @@ public class HttpDoAsClient {
     }
   }
 
-
-
   //
   // Create the demo table with two column families, entry: and unused:
   //
@@ -172,7 +168,7 @@ public class HttpDoAsClient {
     Map<ByteBuffer, ColumnDescriptor> columnMap = refresh(client, httpClient)
         .getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println("  column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
+      System.out.println("  column: " + utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
     }
 
     transport.close();
@@ -181,7 +177,7 @@ public class HttpDoAsClient {
 
   private Hbase.Client refresh(Hbase.Client client, THttpClient httpClient) {
     httpClient.setCustomHeader("doAs", doAsUser);
-    if(secure) {
+    if (secure) {
       try {
         httpClient.setCustomHeader("Authorization", generateTicket());
       } catch (GSSException e) {
@@ -232,7 +228,6 @@ public class HttpDoAsClient {
 
   private void printRow(TRowResult rowResult) {
     // copy values into a TreeMap to get them in sorted order
-
     TreeMap<String, TCell> sorted = new TreeMap<>();
     for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
       sorted.put(utf8(column.getKey().array()), column.getValue());
@@ -249,7 +244,10 @@ public class HttpDoAsClient {
   }
 
   static Subject getSubject() throws Exception {
-    if (!secure) return new Subject();
+    if (!secure) {
+      return new Subject();
+    }
 
     /*
      * To authenticate the DemoClient, kinit should be invoked ahead.
      * Here we try to get the Kerberos credential from the ticket cache.
@@ -273,9 +271,9 @@ public class HttpDoAsClient {
         options.put("debug", "true");
 
         return new AppConfigurationEntry[]{
           new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
             AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
             options)};
       }
     });
     context.login();
@@ -46,7 +46,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public class DemoClient {
-
   private static String host = "localhost";
   private static int port = 9090;
   private static boolean secure = false;
@@ -55,7 +54,8 @@ public class DemoClient {
   public static void main(String[] args) throws Exception {
     System.out.println("Thrift2 Demo");
     System.out.println("Usage: DemoClient [host=localhost] [port=9090] [secure=false]");
-    System.out.println("This demo assumes you have a table called \"example\" with a column family called \"family1\"");
+    System.out.println("This demo assumes you have a table called \"example\" with a column " +
+        "family called \"family1\"");
 
     // use passed in arguments instead of defaults
     if (args.length >= 1) {
@@ -96,7 +96,7 @@ public class DemoClient {
     if (framed) {
       transport = new TFramedTransport(transport);
     } else if (secure) {
-      /**
+      /*
        * The Thrift server the DemoClient is trying to connect to
        * must have a matching principal, and support authentication.
        *
@@ -149,7 +149,9 @@ public class DemoClient {
   }
 
   static Subject getSubject() throws Exception {
-    if (!secure) return new Subject();
+    if (!secure) {
+      return new Subject();
+    }
 
     /*
      * To authenticate the DemoClient, kinit should be invoked ahead.
@@ -174,9 +176,9 @@ public class DemoClient {
         options.put("debug", "true");
 
         return new AppConfigurationEntry[]{
           new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
             AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
             options)};
       }
     });
     context.login();
@@ -55,7 +55,8 @@ public class TestRefreshHFilesBase {
     CONF.set(HConstants.REGION_IMPL, regionImpl);
     CONF.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
 
-    CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, RefreshHFilesEndpoint.class.getName());
+    CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+        RefreshHFilesEndpoint.class.getName());
     cluster = HTU.startMiniCluster(NUM_RS);
 
     // Create table
@@ -80,9 +81,9 @@ public class TestRefreshHFilesBase {
       for (Region region : cluster.getRegions(TABLE_NAME)) {
         Path regionDir = new Path(tableDir, region.getRegionInfo().getEncodedName());
         Path familyDir = new Path(regionDir, Bytes.toString(FAMILY));
-        HFileTestUtil
-          .createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(), new Path(familyDir, HFILE_NAME), FAMILY,
-            QUALIFIER, Bytes.toBytes("50"), Bytes.toBytes("60"), NUM_ROWS);
+        HFileTestUtil.createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(),
+          new Path(familyDir, HFILE_NAME), FAMILY, QUALIFIER, Bytes.toBytes("50"),
+          Bytes.toBytes("60"), NUM_ROWS);
       }
     }
   }
@@ -44,7 +44,6 @@ import org.junit.experimental.categories.Category;
 
 @Category(MediumTests.class)
 public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestRefreshHFilesEndpoint.class);
@@ -69,8 +68,9 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
       RefreshHFilesClient refreshHFilesClient = new RefreshHFilesClient(CONF);
       refreshHFilesClient.refreshHFiles(TABLE_NAME);
     } catch (RetriesExhaustedException rex) {
-      if (rex.getCause() instanceof IOException)
+      if (rex.getCause() instanceof IOException) {
         throw new IOException();
+      }
     } catch (Throwable ex) {
       LOG.error(ex.toString(), ex);
       fail("Couldn't call the RefreshRegionHFilesEndpoint");
@@ -81,15 +81,15 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
     HStoreWithFaultyRefreshHFilesAPI store;
 
     public HRegionForRefreshHFilesEP(final Path tableDir, final WAL wal, final FileSystem fs,
-        final Configuration confParam, final RegionInfo regionInfo,
-        final TableDescriptor htd, final RegionServerServices rsServices) {
+        final Configuration confParam, final RegionInfo regionInfo, final TableDescriptor htd,
+        final RegionServerServices rsServices) {
       super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
     }
 
     @Override
     public List<HStore> getStores() {
       List<HStore> list = new ArrayList<>(stores.size());
-      /**
+      /*
        * This is used to trigger the custom definition (faulty)
        * of refresh HFiles API.
        */
@@ -17,8 +17,13 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
@@ -49,7 +54,6 @@ import org.mockito.stubbing.Answer;
 
 @Category({MapReduceTests.class, LargeTests.class})
 public class TestMapReduceExamples {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestMapReduceExamples.class);
@@ -59,11 +63,9 @@ public class TestMapReduceExamples {
   /**
    * Test SampleUploader from examples
    */
-
   @SuppressWarnings("unchecked")
   @Test
   public void testSampleUploader() throws Exception {
-
     Configuration configuration = new Configuration();
     Uploader uploader = new Uploader();
     Mapper<LongWritable, Text, ImmutableBytesWritable, Put>.Context ctx = mock(Context.class);
@@ -86,7 +88,6 @@ public class TestMapReduceExamples {
     String[] args = { dir.toString(), "simpleTable" };
     Job job = SampleUploader.configureJob(configuration, args);
     assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
-
   }
 
   /**
@@ -179,11 +180,9 @@ public class TestMapReduceExamples {
         assertTrue(data.toString().contains(
           "Usage: IndexBuilder <TABLE_NAME> <COLUMN_FAMILY> <ATTR> [<ATTR> ...]"));
       }
-
     } finally {
       System.setErr(oldPrintStream);
       System.setSecurityManager(SECURITY_MANAGER);
     }
-
   }
 }