HBASE-8935 IntegrationTestBigLinkedList fails under load on 0.94 due to some scan issues - add logging

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1503524 13f79535-47bb-0310-9956-ffa450edef68
sershe 2013-07-15 23:53:03 +00:00
parent 8eb2f3709a
commit a96d77ffd5
3 changed files with 12 additions and 1 deletion

IntegrationTestBigLinkedList.java

@@ -51,9 +51,11 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ScannerCallable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
+import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -624,6 +626,8 @@ public class IntegrationTestBigLinkedList extends Configured implements Tool {
     job.setNumReduceTasks(numReducers);
     job.setJarByClass(getClass());
 
+    setJobScannerConf(job);
+
     Scan scan = new Scan();
     scan.addColumn(FAMILY_NAME, COLUMN_PREV);
     scan.setCaching(10000);
@@ -1065,4 +1069,10 @@ public class IntegrationTestBigLinkedList extends Configured implements Tool {
       job.getConfiguration().setInt(GENERATOR_WRAP_KEY, wrapMuplitplier.intValue());
     }
   }
+
+  private static void setJobScannerConf(Job job) {
+    // Make sure scanners log something useful to make debugging possible.
+    job.getConfiguration().setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
+    job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 100000);
+  }
 }
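For reference, the two knobs the new setJobScannerConf() helper flips can be applied to any HBase MapReduce job configuration, not just this integration test. A minimal sketch using the same constants the patch references (the wrapper class name here is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.ScannerCallable;
    import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;

    public class ScannerLoggingConf {
      // Mirrors what setJobScannerConf() does in the patch above.
      public static Configuration create() {
        Configuration conf = HBaseConfiguration.create();
        // Enable extra per-scanner activity logging in ScannerCallable.
        conf.setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
        // Have the record reader log a progress line every 100000 rows,
        // the same threshold the patch picks for the integration test.
        conf.setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 100000);
        return conf;
      }
    }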

TableRecordReaderImpl.java

@@ -52,7 +52,7 @@ public class TableRecordReaderImpl {
   public static final String LOG_PER_ROW_COUNT
       = "hbase.mapreduce.log.scanner.rowcount";
 
-  static final Log LOG = LogFactory.getLog(TableRecordReader.class);
+  static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
 
   // HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase
   private static final String HBASE_COUNTER_GROUP_NAME =

HRegionServer.java

@@ -3016,6 +3016,7 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterface,
       if (request.hasScannerId()) {
         rsh = scanners.get(scannerName);
         if (rsh == null) {
+          LOG.info("Client tried to access missing scanner " + scannerName);
           throw new UnknownScannerException(
               "Name: " + scannerName + ", already closed?");
         }
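The new server-side log line supplies the other half of the picture for UnknownScannerException: the client sees the exception, and the server now records which scanner name was missing. A rough sketch of the client-side restart pattern such errors usually trigger, re-opening the scan just past the last row successfully read (class, method, and resume logic are illustrative, not part of this patch):

    import java.io.IOException;
    import org.apache.hadoop.hbase.UnknownScannerException;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;

    public class ScanRestartSketch {
      /** Scan all rows, re-opening the scanner if the server no longer knows it. */
      public static void scanAll(HTable table, Scan scan) throws IOException {
        byte[] lastRow = null;
        ResultScanner scanner = table.getScanner(scan);
        try {
          while (true) {
            Result result;
            try {
              result = scanner.next();
            } catch (UnknownScannerException e) {
              // The server dropped our scanner (lease expired, already closed);
              // this is the case the new LOG.info line above makes visible.
              scanner.close();
              Scan resume = new Scan(scan);
              if (lastRow != null) {
                // Restart one byte past the last row we processed.
                byte[] start = new byte[lastRow.length + 1];
                System.arraycopy(lastRow, 0, start, 0, lastRow.length);
                resume.setStartRow(start);
              }
              scanner = table.getScanner(resume);
              continue;
            }
            if (result == null) {
              break; // end of table
            }
            lastRow = result.getRow();
            // ... process result ...
          }
        } finally {
          scanner.close();
        }
      }
    }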