HBASE-23635 Reduced number of Checkstyle violations in hbase-mapreduce

Signed-off-by: Viraj Jasani <vjasani@apache.org>
Signed-off-by: stack <stack@apache.org>

parent f98085e44e
commit 085450ba4e
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -53,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.gson.Gson;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
@@ -64,19 +64,19 @@ public class TestPerformanceEvaluation {
   public void testDefaultInMemoryCompaction() {
     PerformanceEvaluation.TestOptions defaultOpts =
         new PerformanceEvaluation.TestOptions();
-    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
         defaultOpts.getInMemoryCompaction().toString());
     HTableDescriptor htd = PerformanceEvaluation.getTableDescriptor(defaultOpts);
     for (HColumnDescriptor hcd: htd.getFamilies()) {
-      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
           hcd.getInMemoryCompaction().toString());
     }
   }
 
   @Test
-  public void testSerialization() throws IOException {
+  public void testSerialization() {
     PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
-    assertTrue(!options.isAutoFlush());
+    assertFalse(options.isAutoFlush());
     options.setAutoFlush(true);
     Gson gson = GsonUtil.createGson().create();
     String optionsString = gson.toJson(options);
@@ -101,8 +101,7 @@ public class TestPerformanceEvaluation {
     long len = fs.getFileStatus(p).getLen();
     assertTrue(len > 0);
     byte[] content = new byte[(int) len];
-    FSDataInputStream dis = fs.open(p);
-    try {
+    try (FSDataInputStream dis = fs.open(p)) {
       dis.readFully(content);
       BufferedReader br = new BufferedReader(
           new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8));
@@ -111,8 +110,6 @@ public class TestPerformanceEvaluation {
         count++;
       }
       assertEquals(clients, count);
-    } finally {
-      dis.close();
     }
   }
 
@@ -170,9 +167,8 @@ public class TestPerformanceEvaluation {
   }
 
   @Test
-  public void testZipfian()
-      throws NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException,
-      IllegalArgumentException, InvocationTargetException {
+  public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
+      IllegalAccessException, IllegalArgumentException, InvocationTargetException {
     TestOptions opts = new PerformanceEvaluation.TestOptions();
     opts.setValueZipf(true);
     final int valueSize = 1024;
@@ -197,10 +193,10 @@ public class TestPerformanceEvaluation {
   public void testSetBufferSizeOption() {
     TestOptions opts = new PerformanceEvaluation.TestOptions();
     long bufferSize = opts.getBufferSize();
-    assertEquals(bufferSize, 2l * 1024l * 1024l);
-    opts.setBufferSize(64l * 1024l);
+    assertEquals(bufferSize, 2L * 1024L * 1024L);
+    opts.setBufferSize(64L * 1024L);
     bufferSize = opts.getBufferSize();
-    assertEquals(bufferSize, 64l * 1024l);
+    assertEquals(bufferSize, 64L * 1024L);
   }
 
   @Test
@@ -265,7 +261,7 @@ public class TestPerformanceEvaluation {
     assertNotNull(options);
     assertNotNull(options.getCmdName());
     assertEquals(cmdName, options.getCmdName());
-    assertTrue(options.getMultiPut() == 10);
+    assertEquals(10, options.getMultiPut());
   }
 
   @Test
@@ -288,6 +284,6 @@ public class TestPerformanceEvaluation {
     assertNotNull(options);
     assertNotNull(options.getCmdName());
     assertEquals(cmdName, options.getCmdName());
-    assertTrue(options.getConnCount() == 10);
+    assertEquals(10, options.getConnCount());
   }
 }
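The try/finally to try-with-resources rewrites in this commit (here and in TestCellCounter below) are behavior-preserving: the compiler emits the close() call, including on the exception path. A minimal standalone sketch of the pattern, using a hypothetical file name and variable names that are not from this commit:

    import java.io.FileInputStream;
    import java.io.IOException;

    public class TryWithResourcesSketch {
      public static void main(String[] args) throws IOException {
        // Before: close() had to live in a finally block.
        FileInputStream a = new FileInputStream("example.txt"); // hypothetical file
        try {
          System.out.println(a.read());
        } finally {
          a.close();
        }
        // After: the stream is closed automatically when the block exits.
        try (FileInputStream b = new FileInputStream("example.txt")) {
          System.out.println(b.read());
        }
      }
    }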
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -44,14 +43,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
 
   @Override
   public RecordReader<NullWritable, NullWritable> createRecordReader(
-      InputSplit split,
-      TaskAttemptContext tac) throws IOException, InterruptedException {
+      InputSplit split, TaskAttemptContext tac) {
     return new SingleRecordReader<>(NullWritable.get(), NullWritable.get());
   }
 
   @Override
-  public List<InputSplit> getSplits(JobContext context) throws IOException,
-      InterruptedException {
+  public List<InputSplit> getSplits(JobContext context) {
     int count = getNumMapTasks(context.getConfiguration());
     List<InputSplit> splits = new ArrayList<>(count);
     for (int i = 0; i < count; i++) {
@@ -70,21 +67,21 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
 
   private static class NullInputSplit extends InputSplit implements Writable {
     @Override
-    public long getLength() throws IOException, InterruptedException {
+    public long getLength() {
       return 0;
     }
 
     @Override
-    public String[] getLocations() throws IOException, InterruptedException {
+    public String[] getLocations() {
       return new String[] {};
     }
 
     @Override
-    public void readFields(DataInput in) throws IOException {
+    public void readFields(DataInput in) {
     }
 
    @Override
-    public void write(DataOutput out) throws IOException {
+    public void write(DataOutput out) {
     }
   }
 
@@ -125,10 +122,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
     @Override
     public boolean nextKeyValue() {
-      if (providedKey) return false;
+      if (providedKey) {
+        return false;
+      }
 
       providedKey = true;
       return true;
     }
 
   }
 }
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java

@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertFalse;
@@ -80,8 +79,8 @@ public abstract class TableSnapshotInputFormatTestBase {
     throws Exception;
 
   protected abstract void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
-      String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion, int expectedNumSplits,
-      boolean shutdownCluster) throws Exception;
+      String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
+      int expectedNumSplits, boolean shutdownCluster) throws Exception;
 
   protected abstract byte[] getStartRow();
 
@@ -158,7 +157,8 @@ public abstract class TableSnapshotInputFormatTestBase {
     String snapshotName, Path tmpTableDir) throws Exception;
 
   protected void testWithMapReduce(HBaseTestingUtility util, String snapshotName,
-      int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster) throws Exception {
+      int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster)
+      throws Exception {
     setupCluster();
     try {
       Path tableDir = util.getDataTestDirOnTestFS(snapshotName);
@@ -182,10 +182,11 @@ public abstract class TableSnapshotInputFormatTestBase {
           cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
       }
 
-      for (int j = 0; j < FAMILIES.length; j++) {
-        byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
-        Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
-            + " ,actual:" + Bytes.toString(actual), row, actual);
+      for (byte[] family : FAMILIES) {
+        byte[] actual = result.getValue(family, family);
+        Assert.assertArrayEquals(
+          "Row in snapshot does not match, expected:" + Bytes.toString(row) + " ,actual:" + Bytes
+            .toString(actual), row, actual);
       }
     }
 
@@ -226,5 +227,4 @@ public abstract class TableSnapshotInputFormatTestBase {
     admin.flush(tableName);
     table.close();
   }
-
 }
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java

@@ -51,7 +51,6 @@ import org.junit.rules.TestName;
-
 @Category({MapReduceTests.class, LargeTests.class})
 public class TestCellCounter {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestCellCounter.class);
@@ -93,8 +92,7 @@ public class TestCellCounter {
   public void testCellCounter() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -107,8 +105,8 @@ public class TestCellCounter {
      t.put(p);
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
      runCount(args);
-     FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-         "part-r-00000");
+     FileInputStream inputStream =
+         new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -118,8 +116,7 @@ public class TestCellCounter {
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
   }
@@ -131,8 +128,7 @@ public class TestCellCounter {
   public void testCellCounterPrefix() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try {
+    try (Table t = UTIL.createTable(sourceTable, families)) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -146,7 +142,7 @@ public class TestCellCounter {
      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "\\x01row1" };
      runCount(args);
      FileInputStream inputStream =
          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -157,7 +153,6 @@ public class TestCellCounter {
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
     } finally {
-      t.close();
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
@@ -169,8 +164,7 @@ public class TestCellCounter {
   public void testCellCounterStartTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -181,13 +175,11 @@ public class TestCellCounter {
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
-     String[] args = {
-       sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
-       "--starttime=" + now,
-       "--endtime=" + now + 2 };
+     String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+       "--starttime=" + now, "--endtime=" + now + 2 };
      runCount(args);
-     FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-         "part-r-00000");
+     FileInputStream inputStream =
+         new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -197,8 +189,7 @@ public class TestCellCounter {
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
   }
@@ -210,8 +201,7 @@ public class TestCellCounter {
   public void testCellCounteEndTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -222,12 +212,11 @@ public class TestCellCounter {
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
-     String[] args = {
-       sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
-       "--endtime=" + now + 1 };
+     String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+       "--endtime=" + now + 1 };
      runCount(args);
-     FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-         "part-r-00000");
+     FileInputStream inputStream =
+         new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -237,8 +226,7 @@ public class TestCellCounter {
      assertTrue(data.contains("a;q" + "\t" + "1"));
      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
   }
@@ -250,8 +238,7 @@ public class TestCellCounter {
   public void testCellCounteOutOfTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -262,24 +249,22 @@ public class TestCellCounter {
      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
      t.put(p);
-     String[] args = {
-       sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
+     String[] args =
+       { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
          "--endtime=" + now + 2 };
 
      runCount(args);
-     FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-         "part-r-00000");
+     FileInputStream inputStream =
+         new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
      String data = IOUtils.toString(inputStream);
      inputStream.close();
      // nothing should hace been emitted to the reducer
      assertTrue(data.isEmpty());
-    }finally{
-      t.close();
+    } finally {
      FileUtil.fullyDelete(new File(OUTPUT_DIR));
    }
   }
 
-
   private boolean runCount(String[] args) throws Exception {
     // need to make a copy of the configuration because to make sure
     // different temp dirs are used.
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java

@@ -52,7 +52,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
  */
 @Category(LargeTests.class)
 public class TestHashTable {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestHashTable.class);
@@ -109,11 +108,12 @@ public class TestHashTable {
 
     long batchSize = 300;
     int code = hashTable.run(new String[] {
       "--batchsize=" + batchSize,
       "--numhashfiles=" + numHashFiles,
       "--scanbatch=2",
       tableName.getNameAsString(),
-      testDir.toString()});
+      testDir.toString()
+    });
     assertEquals("test job failed", 0, code);
 
     FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -159,7 +159,7 @@ public class TestHashTable {
       MapFile.Reader reader = new MapFile.Reader(hashPath, fs.getConf());
       ImmutableBytesWritable key = new ImmutableBytesWritable();
       ImmutableBytesWritable hash = new ImmutableBytesWritable();
-      while(reader.next(key, hash)) {
+      while (reader.next(key, hash)) {
        String keyString = Bytes.toHex(key.get(), key.getOffset(), key.getLength());
        LOG.debug("Key: " + (keyString.isEmpty() ? "-1" : Integer.parseInt(keyString, 16))
            + " Hash: " + Bytes.toHex(hash.get(), hash.getOffset(), hash.getLength()));
@@ -194,6 +194,4 @@ public class TestHashTable {
     TEST_UTIL.deleteTable(tableName);
     TEST_UTIL.cleanupDataTestDirOnTestFS();
   }
-
-
 }
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java

@@ -46,7 +46,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
  */
 @Category({MapReduceTests.class, SmallTests.class})
 public class TestImportTsvParser {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestImportTsvParser.class);
@@ -165,7 +164,7 @@ public class TestImportTsvParser {
 
     byte[] line = Bytes.toBytes("rowkey\t1234\tval_a");
     ParsedLine parsed = parser.parse(line, line.length);
-    assertEquals(1234l, parsed.getTimestamp(-1));
+    assertEquals(1234L, parsed.getTimestamp(-1));
     checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
   }
 
@@ -230,9 +229,9 @@ public class TestImportTsvParser {
      line = Bytes.toBytes("\t\tval_a\t1234");
      parser.parseRowKey(line, line.length);
      fail("Should get BadTsvLineException on empty rowkey.");
-    } catch (BadTsvLineException b) {
+    } catch (BadTsvLineException ignored) {
    }
 
    parser = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
    assertEquals(1, parser.getRowKeyColumnIndex());
    line = Bytes.toBytes("val_a\trowkey\t1234");
@@ -243,9 +242,9 @@ public class TestImportTsvParser {
      line = Bytes.toBytes("val_a");
      rowKeyOffsets = parser.parseRowKey(line, line.length);
      fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-    } catch (BadTsvLineException b) {
+    } catch (BadTsvLineException ignored) {
    }
 
    parser = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
    assertEquals(2, parser.getRowKeyColumnIndex());
    line = Bytes.toBytes("val_a\t1234\trowkey");
@@ -262,15 +261,15 @@ public class TestImportTsvParser {
    ParsedLine parse = parser.parse(line, line.length);
    assertEquals(18, parse.getAttributeKeyOffset());
    assertEquals(3, parser.getAttributesKeyColumnIndex());
-   String attributes[] = parse.getIndividualAttributes();
-   assertEquals(attributes[0], "key=>value");
+   String[] attributes = parse.getIndividualAttributes();
+   assertEquals("key=>value", attributes[0]);
    try {
      line = Bytes.toBytes("rowkey\tval_a\t1234");
      parser.parse(line, line.length);
      fail("Should get BadTsvLineException on empty rowkey.");
-    } catch (BadTsvLineException b) {
+    } catch (BadTsvLineException ignored) {
    }
 
    parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
    assertEquals(2, parser.getRowKeyColumnIndex());
    line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234");
@@ -278,14 +277,14 @@ public class TestImportTsvParser {
    assertEquals(0, parse.getAttributeKeyOffset());
    assertEquals(0, parser.getAttributesKeyColumnIndex());
    attributes = parse.getIndividualAttributes();
-   assertEquals(attributes[0], "key=>value");
+   assertEquals("key=>value", attributes[0]);
    try {
      line = Bytes.toBytes("val_a");
      ParsedLine parse2 = parser.parse(line, line.length);
      fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-    } catch (BadTsvLineException b) {
+    } catch (BadTsvLineException ignored) {
    }
 
    parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
    assertEquals(3, parser.getRowKeyColumnIndex());
    line = Bytes.toBytes("val_a\tkey0=>value0,key1=>value1,key2=>value2\t1234\trowkey");
@@ -294,8 +293,8 @@ public class TestImportTsvParser {
    assertEquals(6, parse.getAttributeKeyOffset());
    String[] attr = parse.getIndividualAttributes();
    int i = 0;
-   for(String str : attr) {
-     assertEquals(("key"+i+"=>"+"value"+i), str );
+   for (String str : attr) {
+     assertEquals(("key" + i + "=>" + "value" + i), str);
      i++;
    }
   }
@@ -310,9 +309,8 @@ public class TestImportTsvParser {
    ParsedLine parse = parser.parse(line, line.length);
    assertEquals(18, parse.getAttributeKeyOffset());
    assertEquals(3, parser.getAttributesKeyColumnIndex());
-   String attributes[] = parse.getIndividualAttributes();
-   assertEquals(attributes[0], "key=>value");
+   String[] attributes = parse.getIndividualAttributes();
+   assertEquals("key=>value", attributes[0]);
    assertEquals(29, parse.getCellVisibilityColumnOffset());
   }
-
 }
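The 1234l to 1234L change above (and the buffer-size literals in TestPerformanceEvaluation) corresponds to Checkstyle's UpperEll check: a lowercase long suffix is easily misread as the digit one. A small sketch of the ambiguity, with hypothetical values not taken from this commit:

    public class UpperEllSketch {
      public static void main(String[] args) {
        long a = 1111l; // lowercase suffix reads like the five-digit number 11111
        long b = 1111L; // uppercase suffix is unambiguous
        System.out.println(a == b); // prints true; both are the long literal 1111
      }
    }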
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertEquals;
 
-import java.io.IOException;
 import java.util.Arrays;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -55,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
  */
 @Category(LargeTests.class)
 public class TestSyncTable {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestSyncTable.class);
@@ -230,10 +228,8 @@ public class TestSyncTable {
     targetTable.close();
   }
 
-  private void assertTargetDoDeletesFalse(int expectedRows, TableName
-      sourceTableName,
-      TableName targetTableName)
-      throws Exception {
+  private void assertTargetDoDeletesFalse(int expectedRows, TableName sourceTableName,
+      TableName targetTableName) throws Exception {
     Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
     Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
 
@@ -242,7 +238,7 @@ public class TestSyncTable {
     Result targetRow = targetScanner.next();
     Result sourceRow = sourceScanner.next();
     int rowsCount = 0;
-    while (targetRow!=null) {
+    while (targetRow != null) {
       rowsCount++;
       //only compares values for existing rows, skipping rows existing on
       //target only that were not deleted given --doDeletes=false
@@ -292,7 +288,7 @@ public class TestSyncTable {
         if (!CellUtil.matchingQualifier(sourceCell, targetCell)) {
           Assert.fail("Qualifiers don't match");
         }
-        if(targetRowKey < 80 && targetRowKey >= 90){
+        if (targetRowKey < 80 && targetRowKey >= 90){
          if (!CellUtil.matchingTimestamp(sourceCell, targetCell)) {
            Assert.fail("Timestamps don't match");
          }
@@ -317,10 +313,8 @@ public class TestSyncTable {
     targetTable.close();
   }
 
-  private void assertTargetDoPutsFalse(int expectedRows, TableName
-      sourceTableName,
-      TableName targetTableName)
-      throws Exception {
+  private void assertTargetDoPutsFalse(int expectedRows, TableName sourceTableName,
+      TableName targetTableName) throws Exception {
     Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
     Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
 
@@ -432,18 +426,18 @@ public class TestSyncTable {
     return syncTable.counters;
   }
 
-  private void hashSourceTable(TableName sourceTableName, Path testDir)
-      throws Exception, IOException {
+  private void hashSourceTable(TableName sourceTableName, Path testDir) throws Exception {
     int numHashFiles = 3;
     long batchSize = 100; // should be 2 batches per region
     int scanBatch = 1;
     HashTable hashTable = new HashTable(TEST_UTIL.getConfiguration());
     int code = hashTable.run(new String[] {
       "--batchsize=" + batchSize,
       "--numhashfiles=" + numHashFiles,
       "--scanbatch=" + scanBatch,
       sourceTableName.getNameAsString(),
-      testDir.toString()});
+      testDir.toString()
+    });
     assertEquals("hash table job failed", 0, code);
 
     FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -570,6 +564,4 @@ public class TestSyncTable {
     sourceTable.close();
     targetTable.close();
   }
-
-
 }
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java

@@ -36,7 +36,6 @@ import org.junit.rules.TestName;
-
 @Category({MapReduceTests.class, SmallTests.class})
 public class TestTableSplit {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestTableSplit.class);
@@ -52,12 +51,12 @@ public class TestTableSplit {
     TableSplit split2 = new TableSplit(TableName.valueOf(name.getMethodName()),
         "row-start".getBytes(),
         "row-end".getBytes(), "location");
-    assertEquals (split1, split2);
-    assertTrue (split1.hashCode() == split2.hashCode());
+    assertEquals(split1, split2);
+    assertTrue(split1.hashCode() == split2.hashCode());
     HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
-    assertTrue(set.size() == 1);
+    assertEquals(1, set.size());
   }
 
   /**
@@ -72,12 +71,12 @@ public class TestTableSplit {
         "row-start".getBytes(),
         "row-end".getBytes(), "location", 1982);
 
-    assertEquals (split1, split2);
-    assertTrue (split1.hashCode() == split2.hashCode());
+    assertEquals(split1, split2);
+    assertTrue(split1.hashCode() == split2.hashCode());
     HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
-    assertTrue(set.size() == 1);
+    assertEquals(1, set.size());
   }
 
   /**
@@ -117,14 +116,14 @@ public class TestTableSplit {
         + "encoded region name: encoded-region-name)";
     Assert.assertEquals(str, split.toString());
 
-    split = new TableSplit((TableName) null, null, null, null);
+    split = new TableSplit(null, null, null, null);
     str =
         "HBase table split(table name: null, scan: , start row: null, "
             + "end row: null, region location: null, "
            + "encoded region name: )";
     Assert.assertEquals(str, split.toString());
 
-    split = new TableSplit((TableName) null, null, null, null, null, null, 1000L);
+    split = new TableSplit(null, null, null, null, null, null, 1000L);
     str =
         "HBase table split(table name: null, scan: , start row: null, "
            + "end row: null, region location: null, "
@@ -132,4 +131,3 @@ public class TestTableSplit {
     Assert.assertEquals(str, split.toString());
   }
 }
-
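The assertTrue(x == y) to assertEquals(expected, actual) rewrites above (and in TestPerformanceEvaluation) also improve failure output: assertEquals reports both values, while a failed assertTrue only throws a bare AssertionError. A sketch with hypothetical values, not code from this commit:

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    public class AssertStyleSketch {
      @Test
      public void sizeCheck() {
        int size = 1;
        // On mismatch this fails with "expected:<1> but was:<...>", naming both values.
        assertEquals(1, size);
        // On mismatch this fails with a bare AssertionError carrying no values.
        assertTrue(size == 1);
      }
    }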
hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java

@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue;
-
-import java.io.IOException;
 
 /**
  * Dummy mapper used for unit tests to verify that the mapper can be injected.
@@ -33,7 +33,6 @@ import java.io.IOException;
  * reading the input data before writing it to HFiles.
  */
 public class TsvImporterCustomTestMapper extends TsvImporterMapper {
-
   @Override
   protected void setup(Context context) {
     doSetup(context);
PerformanceEvaluation_Counter.properties

@@ -20,9 +20,9 @@
 
 CounterGroupName= HBase Performance Evaluation
 ELAPSED_TIME.name= Elapsed time in milliseconds
 ROWS.name= Row count
 # ResourceBundle properties file for Map-Reduce counters
 
 CounterGroupName= HBase Performance Evaluation
 ELAPSED_TIME.name= Elapsed time in milliseconds
 ROWS.name= Row count