HBASE-9714 add scan logging to IntegrationTestLoadAndVerify
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1530065 13f79535-47bb-0310-9956-ffa450edef68
commit 6366c25d95
parent c28b58f280
@@ -47,10 +47,12 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ScannerCallable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.NMapInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
+import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.BytesWritable;
@@ -316,6 +318,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     job.setMapperClass(LoadMapper.class);
     job.setInputFormatClass(NMapInputFormat.class);
     job.setNumReduceTasks(0);
+    setJobScannerConf(job);
     FileOutputFormat.setOutputPath(job, outputDir);
 
     TableMapReduceUtil.addDependencyJars(job);
@@ -331,6 +334,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     Job job = new Job(conf);
     job.setJarByClass(this.getClass());
     job.setJobName(TEST_NAME + " Verification for " + htd.getTableName());
+    setJobScannerConf(job);
 
     Scan scan = new Scan();
 
@@ -350,6 +354,13 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     assertEquals(0, numOutputRecords);
   }
 
+  private static void setJobScannerConf(Job job) {
+    // Make sure scanners log something useful to make debugging possible.
+    job.getConfiguration().setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
+    long lpr = job.getConfiguration().getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT) / 100;
+    job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, (int)lpr);
+  }
+
   public Path getTestDir(String testName, String subdir) throws IOException {
     //HBaseTestingUtility.getDataTestDirOnTestFs() has not been backported.
     FileSystem fs = FileSystem.get(getConf());
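Note: the two configuration keys used above (ScannerCallable.LOG_SCANNER_ACTIVITY and TableRecordReaderImpl.LOG_PER_ROW_COUNT) come straight from the diff. As a rough sketch, not part of this commit, the same scanner logging could be enabled on any HBase MapReduce job along these lines; the class name ScanLoggingSketch and the fixed 10000-row interval are illustrative assumptions, whereas the commit derives the interval as 1% of the number of rows written:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.ScannerCallable;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
import org.apache.hadoop.mapreduce.Job;

public class ScanLoggingSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = new Job(conf);
    job.setJobName("scan-logging-sketch");
    // Same key the commit enables: have scanner RPCs log their activity.
    job.getConfiguration().setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
    // Same key the commit sets: log a progress line every N rows read.
    // 10000 here is an arbitrary placeholder value.
    job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 10000);
    // ... remaining job setup (mapper, input format, output path) would go here.
  }
}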