From 65b8179c2294359f97744db38b57579f743a996c Mon Sep 17 00:00:00 2001
From: Jan Hentschel
Date: Mon, 29 Apr 2019 16:11:24 +0200
Subject: [PATCH] HBASE-22326 Fixed Checkstyle errors in hbase-examples
---
hbase-examples/pom.xml | 7 +++
.../client/example/RefreshHFilesClient.java | 45 ++++++++++---------
.../example/BulkDeleteEndpoint.java | 7 +--
.../ExampleMasterObserverWithMetrics.java | 4 +-
.../example/RefreshHFilesEndpoint.java | 6 +--
.../hadoop/hbase/mapreduce/IndexBuilder.java | 6 +--
.../hbase/mapreduce/SampleUploader.java | 11 ++---
.../hadoop/hbase/thrift/DemoClient.java | 14 +++---
.../hadoop/hbase/thrift/HttpDoAsClient.java | 20 ++++-----
.../hadoop/hbase/thrift2/DemoClient.java | 16 ++++---
.../example/TestRefreshHFilesBase.java | 9 ++--
.../example/TestRefreshHFilesEndpoint.java | 10 ++---
.../mapreduce/TestMapReduceExamples.java | 15 +++----
13 files changed, 87 insertions(+), 83 deletions(-)
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index b20e1dfa96b..f2ff2fd482c 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -69,6 +69,13 @@
       <plugin>
         <groupId>net.revelc.code</groupId>
         <artifactId>warbucks-maven-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <failOnViolation>true</failOnViolation>
+        </configuration>
+      </plugin>
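With failOnViolation set to true, the maven-checkstyle-plugin should now break the hbase-examples build on any remaining violation (for example when running mvn -pl hbase-examples checkstyle:check), which is presumably why the rest of the patch cleans up every existing warning in the module.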
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
index 06ad195c62e..0611e71779f 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package org.apache.hadoop.hbase.client.example;
import java.io.Closeable;
@@ -51,7 +50,7 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
/**
* Constructor with Conf object
*
- * @param cfg
+ * @param cfg the {@link Configuration} object to use
*/
public RefreshHFilesClient(Configuration cfg) {
try {
@@ -75,26 +74,28 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
}
public void refreshHFiles(final Table table) throws Throwable {
- final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest
- .getDefaultInstance();
- table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW,
- HConstants.EMPTY_END_ROW,
- new Batch.Call<RefreshHFilesProtos.RefreshHFilesService, RefreshHFilesProtos.RefreshHFilesResponse>() {
- @Override
- public RefreshHFilesProtos.RefreshHFilesResponse call(
- RefreshHFilesProtos.RefreshHFilesService refreshHFilesService)
- throws IOException {
- ServerRpcController controller = new ServerRpcController();
- BlockingRpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> rpcCallback =
- new BlockingRpcCallback<>();
- refreshHFilesService.refreshHFiles(controller, request, rpcCallback);
- if (controller.failedOnException()) {
- throw controller.getFailedOn();
- }
- return rpcCallback.get();
- }
- });
+ final RefreshHFilesProtos.RefreshHFilesRequest request =
+ RefreshHFilesProtos.RefreshHFilesRequest.getDefaultInstance();
+ table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class,
+ HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
+ new Batch.Call<RefreshHFilesProtos.RefreshHFilesService,
+ RefreshHFilesProtos.RefreshHFilesResponse>() {
+ @Override
+ public RefreshHFilesProtos.RefreshHFilesResponse call(
+ RefreshHFilesProtos.RefreshHFilesService refreshHFilesService)
+ throws IOException {
+ ServerRpcController controller = new ServerRpcController();
+ BlockingRpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> rpcCallback =
+ new BlockingRpcCallback<>();
+ refreshHFilesService.refreshHFiles(controller, request, rpcCallback);
+
+ if (controller.failedOnException()) {
+ throw controller.getFailedOn();
+ }
+
+ return rpcCallback.get();
+ }
+ });
LOG.debug("Done refreshing HFiles");
}
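For context, a minimal usage sketch of the client above. The table name is hypothetical and a running cluster with RefreshHFilesEndpoint deployed is assumed; refreshHFiles(TableName) is the overload the tests below exercise.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.example.RefreshHFilesClient;

public class RefreshHFilesDemo {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    // RefreshHFilesClient is Closeable, so try-with-resources releases the connection.
    try (RefreshHFilesClient client = new RefreshHFilesClient(conf)) {
      client.refreshHFiles(TableName.valueOf("example_table"));
    }
  }
}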
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index e2681ae04c4..94550534a66 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -87,8 +87,8 @@ import org.slf4j.LoggerFactory;
* return rpcCallback.get();
* }
* };
- * Map<byte[], BulkDeleteResponse> result = ht.coprocessorService(BulkDeleteService.class, scan
- * .getStartRow(), scan.getStopRow(), callable);
+ * Map<byte[], BulkDeleteResponse> result = ht.coprocessorService(BulkDeleteService.class,
+ * scan.getStartRow(), scan.getStopRow(), callable);
* for (BulkDeleteResponse response : result.values()) {
* noOfDeletedRows += response.getRowsDeleted();
* }
@@ -225,7 +225,8 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
int noOfVersionsToDelete = 0;
if (timestamp == null) {
for (Cell kv : deleteRow) {
- delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
+ delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
+ kv.getTimestamp());
noOfVersionsToDelete++;
}
} else {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
index 5fe920ef152..ecc255938ad 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
@@ -15,7 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package org.apache.hadoop.hbase.coprocessor.example;
import java.io.IOException;
@@ -94,7 +93,8 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast
}
@Override
- public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+ public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+ TableName tableName) throws IOException {
// Increment the Counter for disable table operations
this.disableTableCounter.increment();
}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
index 29fe90c0ada..2cb9fd3bff3 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
@@ -15,7 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package org.apache.hadoop.hbase.coprocessor.example;
import com.google.protobuf.RpcCallback;
@@ -57,8 +56,9 @@ public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesServ
}
@Override
- public void refreshHFiles(RpcController controller, RefreshHFilesProtos.RefreshHFilesRequest request,
- RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
+ public void refreshHFiles(RpcController controller,
+ RefreshHFilesProtos.RefreshHFilesRequest request,
+ RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
try {
for (Store store : env.getRegion().getStores()) {
LOG.debug("Refreshing HFiles for region: " + store.getRegionInfo().getRegionNameAsString() +
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 3098ac250fe..b386b2bcb82 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -52,7 +52,8 @@ import org.apache.yetus.audience.InterfaceAudience;
* Modify ${HADOOP_HOME}/conf/hadoop-env.sh to include the hbase jar, the
* zookeeper jar (can be found in the lib/ directory under the HBase root), the examples output
* directory, and the hbase conf directory in HADOOP_CLASSPATH, and then run
- * bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]
+ * bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder
+ * TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]
*
*
*
@@ -117,8 +118,7 @@ public class IndexBuilder extends Configured implements Tool {
/**
* Job configuration.
*/
- public static Job configureJob(Configuration conf, String [] args)
- throws IOException {
+ public static Job configureJob(Configuration conf, String [] args) throws IOException {
String tableName = args[0];
String columnFamily = args[1];
System.out.println("****" + tableName);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 3273f492618..76f3de28ee8 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -66,16 +66,12 @@ public class SampleUploader extends Configured implements Tool {
private static final String NAME = "SampleUploader";
- static class Uploader
- extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
-
+ static class Uploader extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
private long checkpoint = 100;
private long count = 0;
@Override
- public void map(LongWritable key, Text line, Context context)
- throws IOException {
-
+ public void map(LongWritable key, Text line, Context context) throws IOException {
// Input is a CSV file
// Each map() is a single line, where the key is the line number
// Each line is comma-delimited; row,family,qualifier,value
@@ -117,8 +113,7 @@ public class SampleUploader extends Configured implements Tool {
/**
* Job configuration.
*/
- public static Job configureJob(Configuration conf, String [] args)
- throws IOException {
+ public static Job configureJob(Configuration conf, String [] args) throws IOException {
Path inputPath = new Path(args[0]);
String tableName = args[1];
Job job = new Job(conf, NAME + "_" + tableName);
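To make the mapper's contract concrete, here is a sketch (class and method names are mine) of the Put that map() builds from one CSV line, following the row,family,qualifier,value layout the comments describe:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class UploaderSketch {
  public static Put toPut(String line) {
    String[] fields = line.split(",");          // e.g. "row1,family1,qual1,value1"
    Put put = new Put(Bytes.toBytes(fields[0]));
    put.addColumn(Bytes.toBytes(fields[1]), Bytes.toBytes(fields[2]),
        Bytes.toBytes(fields[3]));
    return put;
  }
}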
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index c99d04a1348..aeb50f7a133 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -192,11 +192,11 @@ public class DemoClient {
// Test UTF-8 handling
byte[] invalid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
- (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1};
+ (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1};
byte[] valid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
- (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
- (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
- (byte) 0x83, (byte) 0xAB};
+ (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
+ (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
+ (byte) 0x83, (byte) 0xAB};
ArrayList<Mutation> mutations;
// non-utf8 is fine for data
@@ -421,9 +421,9 @@ public class DemoClient {
options.put("debug", "true");
return new AppConfigurationEntry[]{
- new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
- AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
- options)};
+ new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+ AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+ options)};
}
});
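A note on the byte arrays in the UTF-8 hunk above: the valid sequence decodes to "foo-生ビール", while the invalid one starts a multi-byte sequence with 0xfc, a lead byte that well-formed UTF-8 no longer allows. A small self-contained check:

import java.nio.charset.StandardCharsets;

public class Utf8Demo {
  public static void main(String[] args) {
    byte[] valid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
        (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
        (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
        (byte) 0x83, (byte) 0xAB};
    // Prints "foo-" followed by four Japanese characters; decoding the
    // 0xfc-led array would instead yield replacement characters.
    System.out.println(new String(valid, StandardCharsets.UTF_8));
  }
}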
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 106d8c6e530..a76ef9c42f7 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -70,9 +70,7 @@ public class HttpDoAsClient {
static protected String principal = null;
public static void main(String[] args) throws Exception {
-
if (args.length < 3 || args.length > 4) {
-
System.out.println("Invalid arguments!");
System.out.println("Usage: HttpDoAsClient host port doAsUserName [security=true]");
System.exit(-1);
@@ -143,8 +141,6 @@ public class HttpDoAsClient {
}
}
-
-
//
// Create the demo table with two column families, entry: and unused:
//
@@ -172,7 +168,7 @@ public class HttpDoAsClient {
Map<ByteBuffer, ColumnDescriptor> columnMap = refresh(client, httpClient)
.getColumnDescriptors(ByteBuffer.wrap(t));
for (ColumnDescriptor col2 : columnMap.values()) {
- System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
+ System.out.println(" column: " + utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
}
transport.close();
@@ -181,7 +177,7 @@ public class HttpDoAsClient {
private Hbase.Client refresh(Hbase.Client client, THttpClient httpClient) {
httpClient.setCustomHeader("doAs", doAsUser);
- if(secure) {
+ if (secure) {
try {
httpClient.setCustomHeader("Authorization", generateTicket());
} catch (GSSException e) {
@@ -232,7 +228,6 @@ public class HttpDoAsClient {
private void printRow(TRowResult rowResult) {
// copy values into a TreeMap to get them in sorted order
-
TreeMap<String, TCell> sorted = new TreeMap<>();
for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
sorted.put(utf8(column.getKey().array()), column.getValue());
@@ -249,7 +244,10 @@ public class HttpDoAsClient {
}
static Subject getSubject() throws Exception {
- if (!secure) return new Subject();
+ if (!secure) {
+ return new Subject();
+ }
+
/*
* To authenticate the DemoClient, kinit should be invoked ahead.
* Here we try to get the Kerberos credential from the ticket cache.
@@ -273,9 +271,9 @@ public class HttpDoAsClient {
options.put("debug", "true");
return new AppConfigurationEntry[]{
- new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
- AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
- options)};
+ new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+ AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+ options)};
}
});
context.login();
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index f8d4f64f587..165f3994de6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -46,7 +46,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DemoClient {
-
private static String host = "localhost";
private static int port = 9090;
private static boolean secure = false;
@@ -55,7 +54,8 @@ public class DemoClient {
public static void main(String[] args) throws Exception {
System.out.println("Thrift2 Demo");
System.out.println("Usage: DemoClient [host=localhost] [port=9090] [secure=false]");
- System.out.println("This demo assumes you have a table called \"example\" with a column family called \"family1\"");
+ System.out.println("This demo assumes you have a table called \"example\" with a column " +
+ "family called \"family1\"");
// use passed in arguments instead of defaults
if (args.length >= 1) {
@@ -96,7 +96,7 @@ public class DemoClient {
if (framed) {
transport = new TFramedTransport(transport);
} else if (secure) {
- /**
+ /*
* The Thrift server the DemoClient is trying to connect to
* must have a matching principal, and support authentication.
*
@@ -149,7 +149,9 @@ public class DemoClient {
}
static Subject getSubject() throws Exception {
- if (!secure) return new Subject();
+ if (!secure) {
+ return new Subject();
+ }
/*
* To authenticate the DemoClient, kinit should be invoked ahead.
@@ -174,9 +176,9 @@ public class DemoClient {
options.put("debug", "true");
return new AppConfigurationEntry[]{
- new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
- AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
- options)};
+ new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+ AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+ options)};
}
});
context.login();
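Both demo clients (and TestRefreshHFilesEndpoint below) also downgrade /** ... */ comments inside method bodies to plain /* ... */ blocks. Javadoc comments only document declarations, and Checkstyle flags the misplaced form; a plain block comment is the idiomatic replacement, roughly:

public class CommentStyle {
  void openTransport() {
    /* Plain block comment: fine inside a method body, where Javadoc would be misplaced. */
  }

  public static void main(String[] args) {
    new CommentStyle().openTransport();
  }
}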
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
index b948b62c600..e84929d8103 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
@@ -55,7 +55,8 @@ public class TestRefreshHFilesBase {
CONF.set(HConstants.REGION_IMPL, regionImpl);
CONF.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
- CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, RefreshHFilesEndpoint.class.getName());
+ CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+ RefreshHFilesEndpoint.class.getName());
cluster = HTU.startMiniCluster(NUM_RS);
// Create table
@@ -80,9 +81,9 @@ public class TestRefreshHFilesBase {
for (Region region : cluster.getRegions(TABLE_NAME)) {
Path regionDir = new Path(tableDir, region.getRegionInfo().getEncodedName());
Path familyDir = new Path(regionDir, Bytes.toString(FAMILY));
- HFileTestUtil
- .createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(), new Path(familyDir, HFILE_NAME), FAMILY,
- QUALIFIER, Bytes.toBytes("50"), Bytes.toBytes("60"), NUM_ROWS);
+ HFileTestUtil.createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(),
+ new Path(familyDir, HFILE_NAME), FAMILY, QUALIFIER, Bytes.toBytes("50"),
+ Bytes.toBytes("60"), NUM_ROWS);
}
}
}
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
index 3f9c23b460f..e726a1182f1 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
@@ -44,7 +44,6 @@ import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
-
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestRefreshHFilesEndpoint.class);
@@ -69,8 +68,9 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
RefreshHFilesClient refreshHFilesClient = new RefreshHFilesClient(CONF);
refreshHFilesClient.refreshHFiles(TABLE_NAME);
} catch (RetriesExhaustedException rex) {
- if (rex.getCause() instanceof IOException)
+ if (rex.getCause() instanceof IOException) {
throw new IOException();
+ }
} catch (Throwable ex) {
LOG.error(ex.toString(), ex);
fail("Couldn't call the RefreshRegionHFilesEndpoint");
@@ -81,15 +81,15 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
HStoreWithFaultyRefreshHFilesAPI store;
public HRegionForRefreshHFilesEP(final Path tableDir, final WAL wal, final FileSystem fs,
- final Configuration confParam, final RegionInfo regionInfo,
- final TableDescriptor htd, final RegionServerServices rsServices) {
+ final Configuration confParam, final RegionInfo regionInfo, final TableDescriptor htd,
+ final RegionServerServices rsServices) {
super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
}
@Override
public List<HStore> getStores() {
List<HStore> list = new ArrayList<>(stores.size());
- /**
+ /*
* This is used to trigger the custom definition (faulty)
* of refresh HFiles API.
*/
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
index 3b7f7839661..43dba2c0928 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
@@ -17,8 +17,13 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
@@ -49,7 +54,6 @@ import org.mockito.stubbing.Answer;
@Category({MapReduceTests.class, LargeTests.class})
public class TestMapReduceExamples {
-
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMapReduceExamples.class);
@@ -59,11 +63,9 @@ public class TestMapReduceExamples {
/**
* Test SampleUploader from examples
*/
-
@SuppressWarnings("unchecked")
@Test
public void testSampleUploader() throws Exception {
-
Configuration configuration = new Configuration();
Uploader uploader = new Uploader();
Mapper<LongWritable, Text, ImmutableBytesWritable, Put>.Context ctx = mock(Context.class);
@@ -86,7 +88,6 @@ public class TestMapReduceExamples {
String[] args = { dir.toString(), "simpleTable" };
Job job = SampleUploader.configureJob(configuration, args);
assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
-
}
/**
@@ -179,11 +180,9 @@ public class TestMapReduceExamples {
assertTrue(data.toString().contains(
"Usage: IndexBuilder [ ...]"));
}
-
} finally {
System.setErr(oldPrintStream);
System.setSecurityManager(SECURITY_MANAGER);
}
-
}
}
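The test hunks codify the same conventions as the main-source changes: explicit static imports instead of org.junit.Assert.* and org.mockito.Mockito.*, no stray blank lines, and braces on every if. The brace rule recurs throughout the patch (e.g. if (!secure) return new Subject(); gaining braces). A self-contained sketch of the braced style (class and method names are mine):

public class BraceStyle {
  static String subjectKind(boolean secure) {
    // Single-statement ifs get braces, mirroring the getSubject() fixes above.
    if (!secure) {
      return "plain";
    }
    return "kerberos";
  }

  public static void main(String[] args) {
    System.out.println(subjectKind(false));
  }
}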