HBASE-11819 Unit test for CoprocessorHConnection (Talat Uyarer)

stack 2014-10-30 14:33:29 -07:00
parent 7886c0b82f
commit a404db52ec
1 changed file with 162 additions and 0 deletions


@@ -0,0 +1,162 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
 * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
 * for the specific language governing permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.coprocessor;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertNotNull;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.CoprocessorHConnection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorHConnection {
  static final Log LOG = LogFactory.getLog(TestCoprocessorHConnection.class);

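  // Single column family/qualifier used throughout the test; ROWSIZE rows are written
  // and the two separator keys split the table into three regions.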
  public final static byte[] A = Bytes.toBytes("a");
  private static final int ROWSIZE = 20;
  private static final byte[] rowSeparator1 = Bytes.toBytes(5);
  private static final byte[] rowSeparator2 = Bytes.toBytes(12);
  private static HBaseTestingUtility util = new HBaseTestingUtility();
  private static MiniHBaseCluster cluster = null;

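  /**
   * Region observer that remembers its environment so the test can fetch a row
   * through a CoprocessorHConnection obtained from inside the coprocessor.
   */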
  public static class FooCoprocessor extends BaseRegionObserver {
    private HRegion region;
    private ClusterConnection conn;
    private CoprocessorEnvironment env;

    @Override
    public void start(CoprocessorEnvironment e) {
      region = ((RegionCoprocessorEnvironment)e).getRegion();
      env = e;
    }

    @Override
    public void stop(CoprocessorEnvironment e) {
      region = null;
    }

    public byte[] getRegionStartKey() {
      return region.getStartKey();
    }

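    // Read a row back using a connection created from the coprocessor environment;
    // getConnectionForEnvironment can short-circuit RPCs to the hosting region server
    // instead of going through the normal client path.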
    public Result getOnCoprocessorHConnection(TableName tableName, byte[] key)
        throws IOException {
      conn = CoprocessorHConnection.getConnectionForEnvironment(env);
      Table hTable = conn.getTable(tableName);
      Get get = new Get(key);
      Result result = hTable.get(get);
      return result;
    }
  }

  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    util.startMiniCluster();
    cluster = util.getMiniHBaseCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();
  }

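  // Creates a table hosting FooCoprocessor, writes some rows, then verifies that a read
  // through the coprocessor's CoprocessorHConnection returns the same value as a read
  // through a regular client connection.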
  @Test
  public void testHConnection() throws Exception {
    Admin admin = util.getHBaseAdmin();
    TableName testTable = TableName.valueOf("TestHConnection");
    try {
      // Drop the table if it already exists
      if (admin.tableExists(testTable)) {
        admin.disableTable(testTable);
        admin.deleteTable(testTable);
      }
      HTableDescriptor htd = new HTableDescriptor(testTable);
      htd.addFamily(new HColumnDescriptor(A));
      // Register FooCoprocessor as a table coprocessor
      htd.addCoprocessor(FooCoprocessor.class.getName());
      // Create a table with 3 regions
      admin.createTable(htd, new byte[][] { rowSeparator1, rowSeparator2 });
      util.waitUntilAllRegionsAssigned(testTable);
    } finally {
      admin.close();
    }
    // Create a Connection and get the Table
    Connection connection = ConnectionFactory.createConnection(util.getConfiguration());
    Table table = connection.getTable(testTable);
    try {
      // Put some data
      for (long i = 0; i < ROWSIZE; i++) {
        byte[] iBytes = Bytes.toBytes(i);
        Put put = new Put(iBytes);
        put.add(A, A, iBytes);
        table.put(put);
      }
      // Get the table's first region
      HRegion firstRegion = cluster.getRegions(testTable).get(0);
      // Look up the coprocessor instance running on that region
      Coprocessor cp =
          firstRegion.getCoprocessorHost().findCoprocessor(FooCoprocessor.class.getName());
      assertNotNull("FooCoprocessor coprocessor should be loaded", cp);
      FooCoprocessor fc = (FooCoprocessor) cp;
      // Find the start key of the region that FooCoprocessor is running on
      byte[] regionStartKey = fc.getRegionStartKey();
      // Read the row at the start key through the regular client connection
      Get get = new Get(regionStartKey);
      Result keyData = table.get(get);
      // Read the same row through the CoprocessorHConnection
      Result cpData = fc.getOnCoprocessorHConnection(testTable, regionStartKey);
      // Check that both reads return the same value
      assertArrayEquals(keyData.getValue(A, A), cpData.getValue(A, A));
    } finally {
      table.close();
      connection.close();
    }
  }
}