HBASE-12052: BulkLoad Failed due to no write permission on input files - Addendum

commit 4e56a19cf1
parent 8ee39f1971
Author: Jeffrey Zhong
Date:   2014-09-26 17:51:29 -07:00

2 changed files with 4 additions and 0 deletions


@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -72,6 +73,7 @@ public class TestLoadIncrementalHFiles {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
+    util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,"");
     util.getConfiguration().setInt(
       LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY,
       MAX_FILES_PER_REGION_PER_FAMILY);


@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
@@ -217,6 +218,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
   @BeforeClass
   public static void setupCluster() throws Exception {
     util = new HBaseTestingUtility();
+    util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,"");
     util.startMiniCluster(1);
   }
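
Both hunks make the same one-line change: the test setup clears the region coprocessor list (CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, i.e. hbase.coprocessor.region.classes) before the mini-cluster starts, so the bulk-load tests run without any region coprocessors loaded. A minimal standalone sketch of the same idea follows; the class name and main method are illustrative only, not part of this commit, and it assumes the HBase server jars are on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;

public class ClearRegionCoprocessorsSketch {
  public static void main(String[] args) {
    // Start from a normal HBase configuration; hbase-site.xml (or earlier test
    // setup) may have listed region coprocessors under this key.
    Configuration conf = HBaseConfiguration.create();

    // Setting the key to the empty string clears that list, so a cluster (or an
    // HBaseTestingUtility mini-cluster) started from this configuration loads
    // no region coprocessors.
    conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, "");

    System.out.println(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY + " = '"
        + conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "'");
  }
}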