HBASE-23635 Reduced number of Checkstyle violations in hbase-mapreduce

Signed-off-by: Viraj Jasani <vjasani@apache.org>
Signed-off-by: stack <stack@apache.org>
Author: Jan Hentschel, 2020-01-04 00:48:24 +01:00 (committed by GitHub)
parent ae7f3ebf1e
commit ab9766599d
10 changed files with 110 additions and 144 deletions
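
The diffs below are mechanical cleanups driven by common Checkstyle rules: try-with-resources instead of try/finally close(), uppercase long-literal suffixes, more specific JUnit asserts, braces on one-line ifs, Java-style array declarations, import ordering, and line re-wrapping. As a minimal, self-contained sketch of the two most frequent patterns (all names here are hypothetical, not from the patch):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class CheckstyleCleanupSketch {
  public static void main(String[] args) throws IOException {
    // Pattern 1: uppercase long-literal suffix. A lowercase 'l' is easily
    // read as the digit '1', so Checkstyle's UpperEll rule flags '2l'.
    long bufferSize = 2L * 1024L * 1024L;

    // Pattern 2: try-with-resources replaces try { ... } finally { close(); }.
    Path p = Files.createTempFile("sketch", ".txt");
    try (BufferedReader br = Files.newBufferedReader(p, StandardCharsets.UTF_8)) {
      System.out.println("first line: " + br.readLine() + ", buffer: " + bufferSize);
    }
  }
}
```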

File: org/apache/hadoop/hbase/TestPerformanceEvaluation.java

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -53,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.gson.Gson;
@Category({MiscTests.class, SmallTests.class})
public class TestPerformanceEvaluation {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
@@ -64,19 +64,19 @@ public class TestPerformanceEvaluation {
public void testDefaultInMemoryCompaction() {
PerformanceEvaluation.TestOptions defaultOpts =
new PerformanceEvaluation.TestOptions();
-assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
defaultOpts.getInMemoryCompaction().toString());
HTableDescriptor htd = PerformanceEvaluation.getTableDescriptor(defaultOpts);
for (HColumnDescriptor hcd: htd.getFamilies()) {
-assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
hcd.getInMemoryCompaction().toString());
}
}
@Test
-public void testSerialization() throws IOException {
+public void testSerialization() {
PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
-assertTrue(!options.isAutoFlush());
+assertFalse(options.isAutoFlush());
options.setAutoFlush(true);
Gson gson = GsonUtil.createGson().create();
String optionsString = gson.toJson(options);
@@ -101,8 +101,7 @@
long len = fs.getFileStatus(p).getLen();
assertTrue(len > 0);
byte[] content = new byte[(int) len];
-FSDataInputStream dis = fs.open(p);
-try {
+try (FSDataInputStream dis = fs.open(p)) {
dis.readFully(content);
BufferedReader br = new BufferedReader(
new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8));
@@ -111,8 +110,6 @@
count++;
}
assertEquals(clients, count);
-} finally {
-dis.close();
}
}
@@ -170,9 +167,8 @@
}
@Test
-public void testZipfian()
-throws NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException,
-IllegalArgumentException, InvocationTargetException {
+public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
+IllegalAccessException, IllegalArgumentException, InvocationTargetException {
TestOptions opts = new PerformanceEvaluation.TestOptions();
opts.setValueZipf(true);
final int valueSize = 1024;
@@ -197,10 +193,10 @@
public void testSetBufferSizeOption() {
TestOptions opts = new PerformanceEvaluation.TestOptions();
long bufferSize = opts.getBufferSize();
-assertEquals(bufferSize, 2l * 1024l * 1024l);
-opts.setBufferSize(64l * 1024l);
+assertEquals(bufferSize, 2L * 1024L * 1024L);
+opts.setBufferSize(64L * 1024L);
bufferSize = opts.getBufferSize();
-assertEquals(bufferSize, 64l * 1024l);
+assertEquals(bufferSize, 64L * 1024L);
}
@Test
@@ -265,7 +261,7 @@
assertNotNull(options);
assertNotNull(options.getCmdName());
assertEquals(cmdName, options.getCmdName());
-assertTrue(options.getMultiPut() == 10);
+assertEquals(10, options.getMultiPut());
}
@Test
@@ -288,6 +284,6 @@
assertNotNull(options);
assertNotNull(options.getCmdName());
assertEquals(cmdName, options.getCmdName());
-assertTrue(options.getConnCount() == 10);
+assertEquals(10, options.getConnCount());
}
}
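
Several hunks above swap assertTrue on a boolean expression for the more specific assertFalse/assertEquals. A sketch of why (JUnit 4, as used in these tests; the Options class is a hypothetical stand-in):

```java
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import org.junit.Test;

public class AssertionStyleTest {
  // Hypothetical stand-in for PerformanceEvaluation.TestOptions.
  static class Options {
    boolean autoFlush;
    int multiPut = 10;
  }

  @Test
  public void specificAssertsReportBetterFailures() {
    Options options = new Options();
    // assertFalse(x) is clearer than assertTrue(!x).
    assertFalse(options.autoFlush);
    // On failure, assertEquals prints expected and actual values;
    // assertTrue(options.multiPut == 10) would only report "false".
    assertEquals(10, options.multiPut);
  }
}
```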

File: org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.DataInput;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -44,14 +43,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
@Override
public RecordReader<NullWritable, NullWritable> createRecordReader(
-InputSplit split,
-TaskAttemptContext tac) throws IOException, InterruptedException {
+InputSplit split, TaskAttemptContext tac) {
return new SingleRecordReader<>(NullWritable.get(), NullWritable.get());
}
@Override
-public List<InputSplit> getSplits(JobContext context) throws IOException,
-InterruptedException {
+public List<InputSplit> getSplits(JobContext context) {
int count = getNumMapTasks(context.getConfiguration());
List<InputSplit> splits = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
@@ -70,21 +67,21 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
private static class NullInputSplit extends InputSplit implements Writable {
@Override
-public long getLength() throws IOException, InterruptedException {
+public long getLength() {
return 0;
}
@Override
-public String[] getLocations() throws IOException, InterruptedException {
+public String[] getLocations() {
return new String[] {};
}
@Override
-public void readFields(DataInput in) throws IOException {
+public void readFields(DataInput in) {
}
@Override
-public void write(DataOutput out) throws IOException {
+public void write(DataOutput out) {
}
}
@@ -125,10 +122,12 @@
@Override
public boolean nextKeyValue() {
-if (providedKey) return false;
+if (providedKey) {
+return false;
+}
providedKey = true;
return true;
}
}
}
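
The dropped throws clauses are legal because an override may declare fewer checked exceptions than the method it overrides; combined with the NeedBraces fix above, a minimal sketch (the Resource class is hypothetical):

```java
import java.io.Closeable;

public class NarrowedThrowsSketch {
  // Closeable.close() declares IOException, but an override may drop it.
  static class Resource implements Closeable {
    @Override
    public void close() {
      System.out.println("closed");
    }
  }

  public static void main(String[] args) {
    boolean providedKey = false;
    // NeedBraces: even a single-statement if gets a brace block.
    if (!providedKey) {
      providedKey = true;
    }
    // Because close() no longer declares IOException, no catch is needed here.
    try (Resource r = new Resource()) {
      System.out.println("providedKey = " + providedKey);
    }
  }
}
```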

File: org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java

@@ -15,7 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import static org.junit.Assert.assertFalse;
@@ -80,8 +79,8 @@
throws Exception;
protected abstract void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
-String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion, int expectedNumSplits,
-boolean shutdownCluster) throws Exception;
+String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
+int expectedNumSplits, boolean shutdownCluster) throws Exception;
protected abstract byte[] getStartRow();
@@ -158,7 +157,8 @@
String snapshotName, Path tmpTableDir) throws Exception;
protected void testWithMapReduce(HBaseTestingUtility util, String snapshotName,
-int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster) throws Exception {
+int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster)
+throws Exception {
setupCluster();
try {
Path tableDir = util.getDataTestDirOnTestFS(snapshotName);
@@ -182,10 +182,11 @@
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
}
-for (int j = 0; j < FAMILIES.length; j++) {
-byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
-Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
-+ " ,actual:" + Bytes.toString(actual), row, actual);
+for (byte[] family : FAMILIES) {
+byte[] actual = result.getValue(family, family);
+Assert.assertArrayEquals(
+"Row in snapshot does not match, expected:" + Bytes.toString(row) + " ,actual:" + Bytes
+.toString(actual), row, actual);
}
}
@@ -226,5 +227,4 @@
admin.flush(tableName);
table.close();
}
}
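
The index loop over FAMILIES becomes an enhanced for loop, which drops the unused index and cannot go out of bounds; a sketch with a hypothetical FAMILIES constant:

```java
import java.nio.charset.StandardCharsets;

public class EnhancedForSketch {
  // Hypothetical stand-in for the test's FAMILIES column-family array.
  static final byte[][] FAMILIES = { { 'f', '1' }, { 'f', '2' } };

  public static void main(String[] args) {
    // Before: for (int j = 0; j < FAMILIES.length; j++) { ... FAMILIES[j] ... }
    for (byte[] family : FAMILIES) {
      System.out.println(new String(family, StandardCharsets.US_ASCII));
    }
  }
}
```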

File: org/apache/hadoop/hbase/mapreduce/TestCellCounter.java

@@ -51,7 +51,6 @@ import org.junit.rules.TestName;
@Category({MapReduceTests.class, LargeTests.class})
public class TestCellCounter {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCellCounter.class);
@@ -93,8 +92,7 @@ public class TestCellCounter {
public void testCellCounter() throws Exception {
final TableName sourceTable = TableName.valueOf(name.getMethodName());
byte[][] families = { FAMILY_A, FAMILY_B };
-Table t = UTIL.createTable(sourceTable, families);
-try{
+try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -107,8 +105,8 @@
t.put(p);
String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
runCount(args);
-FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-"part-r-00000");
+FileInputStream inputStream =
+new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
String data = IOUtils.toString(inputStream);
inputStream.close();
assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -118,8 +116,7 @@
assertTrue(data.contains("a;q" + "\t" + "1"));
assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-}finally{
-t.close();
+} finally {
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
@@ -131,8 +128,7 @@
public void testCellCounterPrefix() throws Exception {
final TableName sourceTable = TableName.valueOf(name.getMethodName());
byte[][] families = { FAMILY_A, FAMILY_B };
-Table t = UTIL.createTable(sourceTable, families);
-try {
+try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -146,7 +142,7 @@
String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "\\x01row1" };
runCount(args);
FileInputStream inputStream =
-new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
+new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
String data = IOUtils.toString(inputStream);
inputStream.close();
assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -157,7 +153,6 @@
assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
} finally {
-t.close();
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
@@ -169,8 +164,7 @@
public void testCellCounterStartTimeRange() throws Exception {
final TableName sourceTable = TableName.valueOf(name.getMethodName());
byte[][] families = { FAMILY_A, FAMILY_B };
-Table t = UTIL.createTable(sourceTable, families);
-try{
+try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -181,13 +175,11 @@
p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
t.put(p);
-String[] args = {
-sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
-"--starttime=" + now,
-"--endtime=" + now + 2 };
+String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+"--starttime=" + now, "--endtime=" + now + 2 };
runCount(args);
-FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-"part-r-00000");
+FileInputStream inputStream =
+new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
String data = IOUtils.toString(inputStream);
inputStream.close();
assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -197,8 +189,7 @@
assertTrue(data.contains("a;q" + "\t" + "1"));
assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-}finally{
-t.close();
+} finally {
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
@@ -210,8 +201,7 @@
public void testCellCounteEndTimeRange() throws Exception {
final TableName sourceTable = TableName.valueOf(name.getMethodName());
byte[][] families = { FAMILY_A, FAMILY_B };
-Table t = UTIL.createTable(sourceTable, families);
-try{
+try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -222,12 +212,11 @@
p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
t.put(p);
-String[] args = {
-sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
-"--endtime=" + now + 1 };
+String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+"--endtime=" + now + 1 };
runCount(args);
-FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-"part-r-00000");
+FileInputStream inputStream =
+new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
String data = IOUtils.toString(inputStream);
inputStream.close();
assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -237,8 +226,7 @@
assertTrue(data.contains("a;q" + "\t" + "1"));
assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-}finally{
-t.close();
+} finally {
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
@@ -250,8 +238,7 @@
public void testCellCounteOutOfTimeRange() throws Exception {
final TableName sourceTable = TableName.valueOf(name.getMethodName());
byte[][] families = { FAMILY_A, FAMILY_B };
-Table t = UTIL.createTable(sourceTable, families);
-try{
+try (Table t = UTIL.createTable(sourceTable, families)) {
Put p = new Put(ROW1);
p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -262,24 +249,22 @@
p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
t.put(p);
-String[] args = {
-sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
+String[] args =
+{ sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
"--endtime=" + now + 2 };
runCount(args);
-FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-"part-r-00000");
+FileInputStream inputStream =
+new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
String data = IOUtils.toString(inputStream);
-inputStream.close();
+inputStream.close();
// nothing should hace been emitted to the reducer
assertTrue(data.isEmpty());
-}finally{
-t.close();
+} finally {
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
private boolean runCount(String[] args) throws Exception {
// need to make a copy of the configuration because to make sure
// different temp dirs are used.
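
Every test in this file follows the same rewrite: the Table moves into a try-with-resources header, so close() is automatic, while the finally block keeps only the output-directory cleanup. A sketch with a hypothetical Closeable table:

```java
import java.io.Closeable;

public class TableCleanupSketch {
  // Hypothetical stand-in; HBase's Table interface extends Closeable,
  // which is what makes the rewrite above possible.
  static class FakeTable implements Closeable {
    void put(String row) {
      System.out.println("put " + row);
    }

    @Override
    public void close() {
      System.out.println("table closed");
    }
  }

  public static void main(String[] args) {
    try (FakeTable t = new FakeTable()) {
      t.put("row1");
    } finally {
      // Cleanup unrelated to the table stays in finally,
      // mirroring FileUtil.fullyDelete(new File(OUTPUT_DIR)).
      System.out.println("output dir deleted");
    }
  }
}
```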

File: org/apache/hadoop/hbase/mapreduce/TestHashTable.java

@@ -52,7 +52,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
*/
@Category(LargeTests.class)
public class TestHashTable {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHashTable.class);
@@ -109,11 +108,12 @@
long batchSize = 300;
int code = hashTable.run(new String[] {
-"--batchsize=" + batchSize,
-"--numhashfiles=" + numHashFiles,
-"--scanbatch=2",
-tableName.getNameAsString(),
-testDir.toString()});
+"--batchsize=" + batchSize,
+"--numhashfiles=" + numHashFiles,
+"--scanbatch=2",
+tableName.getNameAsString(),
+testDir.toString()
+});
assertEquals("test job failed", 0, code);
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -159,7 +159,7 @@
MapFile.Reader reader = new MapFile.Reader(hashPath, fs.getConf());
ImmutableBytesWritable key = new ImmutableBytesWritable();
ImmutableBytesWritable hash = new ImmutableBytesWritable();
-while(reader.next(key, hash)) {
+while (reader.next(key, hash)) {
String keyString = Bytes.toHex(key.get(), key.getOffset(), key.getLength());
LOG.debug("Key: " + (keyString.isEmpty() ? "-1" : Integer.parseInt(keyString, 16))
+ " Hash: " + Bytes.toHex(hash.get(), hash.getOffset(), hash.getLength()));
@@ -194,6 +194,4 @@
TEST_UTIL.deleteTable(tableName);
TEST_UTIL.cleanupDataTestDirOnTestFS();
}
}

File: org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java

@@ -46,7 +46,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
*/
@Category({MapReduceTests.class, SmallTests.class})
public class TestImportTsvParser {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestImportTsvParser.class);
@@ -165,7 +164,7 @@
byte[] line = Bytes.toBytes("rowkey\t1234\tval_a");
ParsedLine parsed = parser.parse(line, line.length);
-assertEquals(1234l, parsed.getTimestamp(-1));
+assertEquals(1234L, parsed.getTimestamp(-1));
checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
}
@@ -230,9 +229,9 @@
line = Bytes.toBytes("\t\tval_a\t1234");
parser.parseRowKey(line, line.length);
fail("Should get BadTsvLineException on empty rowkey.");
-} catch (BadTsvLineException b) {
+} catch (BadTsvLineException ignored) {
}
parser = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
assertEquals(1, parser.getRowKeyColumnIndex());
line = Bytes.toBytes("val_a\trowkey\t1234");
@@ -243,9 +242,9 @@
line = Bytes.toBytes("val_a");
rowKeyOffsets = parser.parseRowKey(line, line.length);
fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-} catch (BadTsvLineException b) {
+} catch (BadTsvLineException ignored) {
}
parser = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
assertEquals(2, parser.getRowKeyColumnIndex());
line = Bytes.toBytes("val_a\t1234\trowkey");
@@ -262,15 +261,15 @@
ParsedLine parse = parser.parse(line, line.length);
assertEquals(18, parse.getAttributeKeyOffset());
assertEquals(3, parser.getAttributesKeyColumnIndex());
-String attributes[] = parse.getIndividualAttributes();
-assertEquals(attributes[0], "key=>value");
+String[] attributes = parse.getIndividualAttributes();
+assertEquals("key=>value", attributes[0]);
try {
line = Bytes.toBytes("rowkey\tval_a\t1234");
parser.parse(line, line.length);
fail("Should get BadTsvLineException on empty rowkey.");
-} catch (BadTsvLineException b) {
+} catch (BadTsvLineException ignored) {
}
parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
assertEquals(2, parser.getRowKeyColumnIndex());
line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234");
@@ -278,14 +277,14 @@
assertEquals(0, parse.getAttributeKeyOffset());
assertEquals(0, parser.getAttributesKeyColumnIndex());
attributes = parse.getIndividualAttributes();
-assertEquals(attributes[0], "key=>value");
+assertEquals("key=>value", attributes[0]);
try {
line = Bytes.toBytes("val_a");
ParsedLine parse2 = parser.parse(line, line.length);
fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-} catch (BadTsvLineException b) {
+} catch (BadTsvLineException ignored) {
}
parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
assertEquals(3, parser.getRowKeyColumnIndex());
line = Bytes.toBytes("val_a\tkey0=>value0,key1=>value1,key2=>value2\t1234\trowkey");
@@ -294,8 +293,8 @@
assertEquals(6, parse.getAttributeKeyOffset());
String[] attr = parse.getIndividualAttributes();
int i = 0;
-for(String str : attr) {
-assertEquals(("key"+i+"=>"+"value"+i), str );
+for (String str : attr) {
+assertEquals(("key" + i + "=>" + "value" + i), str);
i++;
}
}
@@ -310,9 +309,8 @@
ParsedLine parse = parser.parse(line, line.length);
assertEquals(18, parse.getAttributeKeyOffset());
assertEquals(3, parser.getAttributesKeyColumnIndex());
-String attributes[] = parse.getIndividualAttributes();
-assertEquals(attributes[0], "key=>value");
+String[] attributes = parse.getIndividualAttributes();
+assertEquals("key=>value", attributes[0]);
assertEquals(29, parse.getCellVisibilityColumnOffset());
}
}
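
Three recurring fixes in this file: Java-style array declarations, expected-before-actual argument order in assertEquals, and renaming swallowed exception variables to `ignored`. A compact JUnit 4 sketch:

```java
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.junit.Test;

public class ParserStyleTest {
  @Test
  public void styleFixes() {
    // Java-style declaration: String[] attributes, not String attributes[].
    String[] attributes = { "key=>value" };
    // JUnit's convention is assertEquals(expected, actual), so a failure
    // message labels the two values correctly.
    assertEquals("key=>value", attributes[0]);
    try {
      Integer.parseInt("not a number");
      fail("Should get NumberFormatException on bad input.");
    } catch (NumberFormatException ignored) {
      // Naming the variable 'ignored' documents that swallowing
      // the exception is intentional.
    }
  }
}
```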

File: org/apache/hadoop/hbase/mapreduce/TestSyncTable.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapreduce;
import static org.junit.Assert.assertEquals;
-import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -55,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
*/
@Category(LargeTests.class)
public class TestSyncTable {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestSyncTable.class);
@@ -230,10 +228,8 @@
targetTable.close();
}
-private void assertTargetDoDeletesFalse(int expectedRows, TableName
-sourceTableName,
-TableName targetTableName)
-throws Exception {
+private void assertTargetDoDeletesFalse(int expectedRows, TableName sourceTableName,
+TableName targetTableName) throws Exception {
Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
@@ -242,7 +238,7 @@
Result targetRow = targetScanner.next();
Result sourceRow = sourceScanner.next();
int rowsCount = 0;
-while (targetRow!=null) {
+while (targetRow != null) {
rowsCount++;
//only compares values for existing rows, skipping rows existing on
//target only that were not deleted given --doDeletes=false
@@ -292,7 +288,7 @@
if (!CellUtil.matchingQualifier(sourceCell, targetCell)) {
Assert.fail("Qualifiers don't match");
}
-if(targetRowKey < 80 && targetRowKey >= 90){
+if (targetRowKey < 80 && targetRowKey >= 90){
if (!CellUtil.matchingTimestamp(sourceCell, targetCell)) {
Assert.fail("Timestamps don't match");
}
@@ -317,10 +313,8 @@
targetTable.close();
}
-private void assertTargetDoPutsFalse(int expectedRows, TableName
-sourceTableName,
-TableName targetTableName)
-throws Exception {
+private void assertTargetDoPutsFalse(int expectedRows, TableName sourceTableName,
+TableName targetTableName) throws Exception {
Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
@@ -432,18 +426,18 @@
return syncTable.counters;
}
-private void hashSourceTable(TableName sourceTableName, Path testDir)
-throws Exception, IOException {
+private void hashSourceTable(TableName sourceTableName, Path testDir) throws Exception {
int numHashFiles = 3;
long batchSize = 100; // should be 2 batches per region
int scanBatch = 1;
HashTable hashTable = new HashTable(TEST_UTIL.getConfiguration());
int code = hashTable.run(new String[] {
-"--batchsize=" + batchSize,
-"--numhashfiles=" + numHashFiles,
-"--scanbatch=" + scanBatch,
-sourceTableName.getNameAsString(),
-testDir.toString()});
+"--batchsize=" + batchSize,
+"--numhashfiles=" + numHashFiles,
+"--scanbatch=" + scanBatch,
+sourceTableName.getNameAsString(),
+testDir.toString()
+});
assertEquals("hash table job failed", 0, code);
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -570,6 +564,4 @@
sourceTable.close();
targetTable.close();
}
}
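
hashSourceTable's clause `throws Exception, IOException` shrinks to `throws Exception` because IOException is a subclass of Exception, so the second entry adds nothing; a sketch:

```java
import java.io.IOException;

public class RedundantThrowsSketch {
  // Before: throws Exception, IOException -- redundant, since any
  // IOException is already covered by Exception.
  static void hashSource(boolean failIo) throws Exception {
    if (failIo) {
      throw new IOException("simulated I/O failure");
    }
  }

  public static void main(String[] args) throws Exception {
    hashSource(false);
    System.out.println("hashed without error");
  }
}
```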

File: org/apache/hadoop/hbase/mapreduce/TestTableSplit.java

@@ -37,7 +37,6 @@ import org.junit.rules.TestName;
@Category({MapReduceTests.class, SmallTests.class})
public class TestTableSplit {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestTableSplit.class);
@@ -53,12 +52,12 @@
TableSplit split2 = new TableSplit(TableName.valueOf(name.getMethodName()),
Bytes.toBytes("row-start"),
Bytes.toBytes("row-end"), "location");
-assertEquals (split1, split2);
-assertTrue (split1.hashCode() == split2.hashCode());
+assertEquals(split1, split2);
+assertTrue(split1.hashCode() == split2.hashCode());
HashSet<TableSplit> set = new HashSet<>(2);
set.add(split1);
set.add(split2);
-assertTrue(set.size() == 1);
+assertEquals(1, set.size());
}
/**
@@ -73,12 +72,12 @@
Bytes.toBytes("row-start"),
Bytes.toBytes("row-end"), "location", 1982);
-assertEquals (split1, split2);
-assertTrue (split1.hashCode() == split2.hashCode());
+assertEquals(split1, split2);
+assertTrue(split1.hashCode() == split2.hashCode());
HashSet<TableSplit> set = new HashSet<>(2);
set.add(split1);
set.add(split2);
-assertTrue(set.size() == 1);
+assertEquals(1, set.size());
}
/**
@@ -118,14 +117,14 @@
+ "encoded region name: encoded-region-name)";
Assert.assertEquals(str, split.toString());
-split = new TableSplit((TableName) null, null, null, null);
+split = new TableSplit(null, null, null, null);
str =
"HBase table split(table name: null, scan: , start row: null, "
+ "end row: null, region location: null, "
+ "encoded region name: )";
Assert.assertEquals(str, split.toString());
-split = new TableSplit((TableName) null, null, null, null, null, null, 1000L);
+split = new TableSplit(null, null, null, null, null, null, 1000L);
str =
"HBase table split(table name: null, scan: , start row: null, "
+ "end row: null, region location: null, "

File: org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java

@@ -17,16 +17,16 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue;
-import java.io.IOException;
/**
* Dummy mapper used for unit tests to verify that the mapper can be injected.
@@ -34,7 +34,6 @@ import java.io.IOException;
* reading the input data before writing it to HFiles.
*/
public class TsvImporterCustomTestMapper extends TsvImporterMapper {
@Override
protected void setup(Context context) {
doSetup(context);
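
The main change in this mapper is regrouping imports into Checkstyle's expected order: java.* first, then org.* groups, alphabetized within each group. For instance (Hadoop classes on the classpath, as in the file above):

```java
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

public class ImportOrderSketch {
  public static void main(String[] args) {
    // java.* imports precede org.* imports; each group is alphabetical.
    Text value = new Text("hello".getBytes(StandardCharsets.UTF_8));
    LongWritable key = new LongWritable(1L);
    System.out.println(key + "\t" + value);
  }
}
```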

File: PerformanceEvaluation Map-Reduce counters ResourceBundle (.properties)

@@ -20,9 +20,9 @@
-CounterGroupName= HBase Performance Evaluation
-ELAPSED_TIME.name= Elapsed time in milliseconds
-ROWS.name= Row count
-ROWS.name= Row count
+# ResourceBundle properties file for Map-Reduce counters
+CounterGroupName= HBase Performance Evaluation
+ELAPSED_TIME.name= Elapsed time in milliseconds
+ROWS.name= Row count
+ROWS.name= Row count