HBASE-3271 Allow .META. table to be exported (Liang Xie)
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1376487 13f79535-47bb-0310-9956-ffa450edef68
parent 82609fa7b1
commit 8c811a6994
TableInputFormatBase.java

@@ -32,11 +32,14 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -158,7 +161,17 @@ extends InputFormat<ImmutableBytesWritable, Result> {
     Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
     if (keys == null || keys.getFirst() == null ||
         keys.getFirst().length == 0) {
-      throw new IOException("Expecting at least one region.");
+      HRegionLocation regLoc = table.getRegionLocation(
+          HConstants.EMPTY_BYTE_ARRAY, false);
+      if (null == regLoc) {
+        throw new IOException("Expecting at least one region.");
+      }
+      List<InputSplit> splits = new ArrayList<InputSplit>(1);
+      InputSplit split = new TableSplit(table.getTableName(),
+          HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
+              .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0]);
+      splits.add(split);
+      return splits;
     }
     List<InputSplit> splits = new ArrayList<InputSplit>(keys.getFirst().length);
     for (int i = 0; i < keys.getFirst().length; i++) {
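Note on the hunk above: HTable.getStartEndKeys() returns no region boundaries for .META., so getSplits() previously failed immediately with "Expecting at least one region." The new branch instead looks up the region holding the empty start key and emits a single TableSplit spanning the whole table, located on the server hosting that region. A minimal sketch of the hostname extraction, with a hypothetical host:port value:

    // Illustrative only, not part of the patch: getHostnamePort() returns
    // "hostname:port", while TableSplit wants a bare hostname, so the code
    // splits on Addressing.HOSTNAME_PORT_SEPARATOR (":") and keeps index [0].
    String hostnamePort = "rs1.example.com:60020"; // hypothetical value
    String hostname = hostnamePort.split(":")[0];  // -> "rs1.example.com"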
TestImportExport.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;

@@ -44,7 +45,6 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import static org.junit.Assert.assertEquals;
 
 @Category(MediumTests.class)
 public class TestImportExport {
@@ -142,6 +142,26 @@ public class TestImportExport {
     assertEquals(3, r.size());
   }
 
+  /**
+   * Test export of the .META. table.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testMetaExport() throws Exception {
+    String EXPORT_TABLE = ".META.";
+    String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1", "0", "0" };
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
+        cluster.getConfiguration()), args);
+    Configuration conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    Job job = Export.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+  }
+
   @Test
   public void testWithDeletes() throws Exception {
     String EXPORT_TABLE = "exportWithDeletes";
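The test drives the same code path a user would hit through the Export tool. A minimal standalone sketch under the same assumptions as testMetaExport (export .META. with one version over the full time range); the driver class name and the output directory /tmp/meta-export are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.mapreduce.Export;
    import org.apache.hadoop.mapreduce.Job;

    // Hypothetical driver mirroring the args used by testMetaExport.
    public class MetaExportDriver {
      public static void main(String[] argv) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Export's CLI contract: <tablename> <outputdir> [<versions> [<starttime> [<endtime>]]]
        String[] args = { ".META.", "/tmp/meta-export", "1", "0", "0" };
        Job job = Export.createSubmittableJob(conf, args);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }

From the shell, the equivalent invocation (assuming the standard launcher script) would be: hbase org.apache.hadoop.hbase.mapreduce.Export .META. /tmp/meta-export 1 0 0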