HBASE-14794 Cleanup TestAtomicOperation

    hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
      Fix a few missing table closes. (This suite seems to leave loads of threads
      when the test is done, but I have not figured out why yet.)

    hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
      Fix some missing table closes. We were leaving around client
      resources.

    hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
      Close up WALs when done.
stack 2015-11-10 18:57:04 -10:00
parent 37815cac9e
commit 7280ec09df
3 changed files with 268 additions and 250 deletions
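
The fix in all three suites is the same pattern: HBase client handles (Connection,
Table, BufferedMutator) are Closeable, and leaving them open leaks client threads and
sockets across tests. A minimal standalone sketch of that pattern, assuming a running
cluster (the class name, table name, and cell coordinates below are hypothetical, not
from this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableCloseSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // try-with-resources closes the Table and then the Connection, in
        // reverse order, even if a put or an assertion throws mid-test.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("sketchTable"))) {
          Put put = new Put(Bytes.toBytes("row"));
          put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
          table.put(put);
        }
      }
    }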

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java

@@ -174,12 +174,12 @@ public class TestMetaWithReplicas {
       util.getHBaseAdmin().disableTable(TABLE);
       util.getHBaseAdmin().deleteTable(TABLE);
     }
-    Table htable = util.createTable(TABLE, FAMILIES, conf);
+    ServerName master = null;
+    try (Connection c = ConnectionFactory.createConnection(util.getConfiguration());) {
+      try (Table htable = util.createTable(TABLE, FAMILIES, conf);) {
     util.getHBaseAdmin().flush(TableName.META_TABLE_NAME);
     Thread.sleep(conf.getInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD,
         30000) * 6);
-    Connection c = ConnectionFactory.createConnection(util.getConfiguration());
-
     List<HRegionInfo> regions = MetaTableAccessor.getTableRegions(zkw, c,
         TableName.valueOf(TABLE));
     HRegionLocation hrl = MetaTableAccessor.getRegionLocation(c, regions.get(0));
@@ -199,7 +199,7 @@ public class TestMetaWithReplicas {
       Thread.sleep(conf.getInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD,
           30000) * 3);
     }
-    ServerName master = util.getHBaseClusterInterface().getClusterStatus().getMaster();
+    master = util.getHBaseClusterInterface().getClusterStatus().getMaster();
     // kill the master so that regionserver recovery is not triggered at all
     // for the meta server
     util.getHBaseClusterInterface().stopMaster(master);
@@ -209,17 +209,19 @@ public class TestMetaWithReplicas {
       util.getHBaseClusterInterface().waitForRegionServerToStop(primary, 60000);
     }
     ((ClusterConnection)c).clearRegionCache();
-    htable.close();
-    htable = c.getTable(TableName.valueOf(TABLE));
+      }
+      Get get = null;
+      Result r = null;
     byte[] row = "test".getBytes();
+      try (Table htable = c.getTable(TableName.valueOf(TABLE));) {
     Put put = new Put(row);
     put.add("foo".getBytes(), row, row);
     BufferedMutator m = c.getBufferedMutator(TableName.valueOf(TABLE));
     m.mutate(put);
     m.flush();
     // Try to do a get of the row that was just put
-    Get get = new Get(row);
-    Result r = htable.get(get);
+      get = new Get(row);
+      r = htable.get(get);
     assertTrue(Arrays.equals(r.getRow(), row));
     // now start back the killed servers and disable use of replicas. That would mean
     // calls go to the primary
@@ -227,12 +229,14 @@ public class TestMetaWithReplicas {
     util.getHBaseClusterInterface().startRegionServer(primary.getHostname(), 0);
     util.getHBaseClusterInterface().waitForActiveAndReadyMaster();
     ((ClusterConnection)c).clearRegionCache();
-    htable.close();
+      }
     conf.setBoolean(HConstants.USE_META_REPLICAS, false);
-    htable = c.getTable(TableName.valueOf(TABLE));
+      try (Table htable = c.getTable(TableName.valueOf(TABLE));) {
     r = htable.get(get);
     assertTrue(Arrays.equals(r.getRow(), row));
     }
+    }
+  }
 
   @Test
   public void testMetaLookupThreadPoolCreated() throws Exception {
@@ -242,7 +246,8 @@ public class TestMetaWithReplicas {
       TEST_UTIL.getHBaseAdmin().disableTable(TABLE);
       TEST_UTIL.getHBaseAdmin().deleteTable(TABLE);
     }
-    Table htable = TEST_UTIL.createTable(TABLE, FAMILIES, TEST_UTIL.getConfiguration());
+    try (Table htable =
+        TEST_UTIL.createTable(TABLE, FAMILIES, TEST_UTIL.getConfiguration());) {
     byte[] row = "test".getBytes();
     HConnectionImplementation c = ((HConnectionImplementation)((HTable)htable).connection);
     // check that metalookup pool would get created
@@ -250,6 +255,7 @@ public class TestMetaWithReplicas {
     ExecutorService ex = c.getCurrentMetaLookupPool();
     assert(ex != null);
     }
+  }
 
   @Test
   public void testChangingReplicaCount() throws Exception {
@@ -408,7 +414,6 @@ public class TestMetaWithReplicas {
 
   @Test
   public void testHBaseFsckWithExcessMetaReplicas() throws Exception {
-    HBaseFsck hbck = new HBaseFsck(TEST_UTIL.getConfiguration());
     // Create a meta replica (this will be the 4th one) and assign it
     HRegionInfo h = RegionReplicaUtil.getRegionInfoForReplica(
         HRegionInfo.FIRST_META_REGIONINFO, 3);
@@ -418,7 +423,7 @@ public class TestMetaWithReplicas {
     TEST_UTIL.getMiniHBaseCluster().getMaster().assignRegion(h);
     HBaseFsckRepair.waitUntilAssigned(TEST_UTIL.getHBaseAdmin(), h);
     // check that problem exists
-    hbck = doFsck(TEST_UTIL.getConfiguration(), false);
+    HBaseFsck hbck = doFsck(TEST_UTIL.getConfiguration(), false);
     assertErrors(hbck, new ERROR_CODE[]{ERROR_CODE.UNKNOWN, ERROR_CODE.SHOULD_NOT_BE_DEPLOYED});
     // fix the problem
     hbck = doFsck(TEST_UTIL.getConfiguration(), true);

hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java

@@ -169,7 +169,7 @@ public class TestImportExport {
   @Test
   public void testSimpleCase() throws Exception {
     String EXPORT_TABLE = "exportSimpleCase";
-    Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);
+    try (Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);) {
     Put p = new Put(ROW1);
     p.add(FAMILYA, QUAL, now, QUAL);
     p.add(FAMILYA, QUAL, now+1, QUAL);
@@ -180,6 +180,7 @@ public class TestImportExport {
     p.add(FAMILYA, QUAL, now+1, QUAL);
     p.add(FAMILYA, QUAL, now+2, QUAL);
     t.put(p);
+    }
 
     String[] args = new String[] {
         EXPORT_TABLE,
@@ -189,7 +190,7 @@ public class TestImportExport {
     assertTrue(runExport(args));
 
     String IMPORT_TABLE = "importTableSimpleCase";
-    t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);
+    try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) {
     args = new String[] {
         "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
         IMPORT_TABLE,
@@ -206,6 +207,7 @@ public class TestImportExport {
     r = t.get(g);
     assertEquals(3, r.size());
     }
+  }
 
   /**
    * Test export hbase:meta table
@@ -238,13 +240,12 @@ public class TestImportExport {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name));
     String IMPORT_TABLE = name;
-    Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);
+    try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) {
     String[] args = new String[] {
         "-Dhbase.import.version=0.94" ,
         IMPORT_TABLE, FQ_OUTPUT_DIR
     };
     assertTrue(runImport(args));
-
     /* exportedTableIn94Format contains 5 rows
     ROW         COLUMN+CELL
     r1          column=f1:c1, timestamp=1383766761171, value=val1
@@ -254,7 +255,7 @@ public class TestImportExport {
     r5          column=f1:c1, timestamp=1383766791506, value=val5
     */
     assertEquals(5, UTIL.countRows(t));
-    t.close();
+    }
   }
 
   /**
@@ -268,8 +269,7 @@ public class TestImportExport {
         .setMaxVersions(1)
     );
     UTIL.getHBaseAdmin().createTable(desc);
-    Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());
-
+    try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
     Put p = new Put(ROW1);
     p.add(FAMILYA, QUAL, now, QUAL);
     p.add(FAMILYA, QUAL, now+1, QUAL);
@@ -287,11 +287,12 @@ public class TestImportExport {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     fs.delete(new Path(FQ_OUTPUT_DIR), true);
-    t.close();
+    }
   }
 
   @Test
   public void testWithDeletes() throws Exception {
+    String IMPORT_TABLE = "importWithDeletes";
     String EXPORT_TABLE = "exportWithDeletes";
     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
     desc.addFamily(new HColumnDescriptor(FAMILYA)
@@ -299,8 +300,7 @@ public class TestImportExport {
         .setKeepDeletedCells(true)
     );
     UTIL.getHBaseAdmin().createTable(desc);
-    Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());
-
+    try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
     Put p = new Put(ROW1);
     p.add(FAMILYA, QUAL, now, QUAL);
     p.add(FAMILYA, QUAL, now+1, QUAL);
@@ -323,16 +323,15 @@ public class TestImportExport {
     };
     assertTrue(runExport(args));
 
-    String IMPORT_TABLE = "importWithDeletes";
     desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
     desc.addFamily(new HColumnDescriptor(FAMILYA)
         .setMaxVersions(5)
         .setKeepDeletedCells(true)
     );
+    }
     UTIL.getHBaseAdmin().createTable(desc);
-    t.close();
-    t = new HTable(UTIL.getConfiguration(), desc.getTableName());
-    args = new String[] {
+    try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
+    String [] args = new String[] {
         IMPORT_TABLE,
         FQ_OUTPUT_DIR
     };
@@ -351,9 +350,8 @@ public class TestImportExport {
     assertEquals(now+2, res[4].getTimestamp());
     assertEquals(now+1, res[5].getTimestamp());
     assertEquals(now, res[6].getTimestamp());
-    t.close();
     }
+  }
 
   @Test
   public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
@@ -365,7 +363,6 @@ public class TestImportExport {
     );
     UTIL.getHBaseAdmin().createTable(desc);
     HTable exportT = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
-
     //Add first version of QUAL
     Put p = new Put(ROW1);
     p.add(FAMILYA, QUAL, now, QUAL);
@@ -417,15 +414,11 @@ public class TestImportExport {
     ResultScanner exportedTScanner = exportT.getScanner(s);
     Result exportedTResult = exportedTScanner.next();
 
-    try
-    {
+    try {
       Result.compareResults(exportedTResult, importedTResult);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       fail("Original and imported tables data comparision failed with error:"+e.getMessage());
-    }
-    finally
-    {
+    } finally {
       exportT.close();
       importT.close();
     }
@@ -469,7 +462,8 @@ public class TestImportExport {
 
     Table importTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
     args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
-        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, FQ_OUTPUT_DIR,
+        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE,
+        FQ_OUTPUT_DIR,
         "1000" };
 
     assertTrue(runImport(args));
@@ -633,8 +627,7 @@ public class TestImportExport {
   public void testDurability() throws IOException, InterruptedException, ClassNotFoundException {
     // Create an export table.
     String exportTableName = "exporttestDurability";
-    Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);
-
+    try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {
     // Insert some data
     Put put = new Put(ROW1);
     put.add(FAMILYA, QUAL, now, QUAL);
@@ -647,6 +640,7 @@ public class TestImportExport {
     put.add(FAMILYA, QUAL, now + 1, QUAL);
     put.add(FAMILYA, QUAL, now + 2, QUAL);
     exportTable.put(put);
+    }
 
     // Run the export
     String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000"};
@@ -654,13 +648,17 @@ public class TestImportExport {
 
     // Create the table for import
     String importTableName = "importTestDurability1";
-    Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+    WAL wal = null;
+    HRegionInfo region = null;
+    TableWALActionListener walListener = null;
+    try (Table importTable =
+        UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);) {
 
     // Register the wal listener for the import table
-    TableWALActionListener walListener = new TableWALActionListener(importTableName);
-    HRegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
+    walListener = new TableWALActionListener(importTableName);
+    region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
         .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
-    WAL wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
+    wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
     wal.registerWALActionsListener(walListener);
 
     // Run the import with SKIP_WAL
@@ -674,8 +672,10 @@ public class TestImportExport {
     assertTrue(getCount(importTable, null) == 2);
 
     // Run the import with the default durability option
+    }
     importTableName = "importTestDurability2";
-    importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+    try (Table importTable =
+        UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);) {
     region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
         .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
     wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
@@ -688,6 +688,7 @@ public class TestImportExport {
     //Ensure that the count is 2 (only one version of key value is obtained)
     assertTrue(getCount(importTable, null) == 2);
   }
+  }
 
   /**
    * This listens to the {@link #visitLogEntryBeforeWrite(HTableDescriptor, WALKey, WALEdit)} to

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
@@ -60,6 +61,8 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL;
@@ -100,7 +103,11 @@ public class TestAtomicOperation {
   @After
   public void teardown() throws IOException {
     if (region != null) {
+      BlockCache bc = region.getStores().get(0).getCacheConfig().getBlockCache();
       ((HRegion)region).close();
+      WAL wal = ((HRegion)region).getWAL();
+      if (wal != null) wal.close();
+      if (bc != null) bc.shutdown();
       region = null;
     }
   }
@@ -175,17 +182,15 @@ public class TestAtomicOperation {
    */
   @Test
   public void testIncrementMultiThreads() throws IOException {
     LOG.info("Starting test testIncrementMultiThreads");
-
     // run a with mixed column families (1 and 3 versions)
     initHRegion(tableName, name.getMethodName(), new int[] {1,3}, fam1, fam2);
-
-    // create 100 threads, each will increment by its own quantity
-    int numThreads = 100;
+    // create 25 threads, each will increment by its own quantity
+    int numThreads = 25;
     int incrementsPerThread = 1000;
     Incrementer[] all = new Incrementer[numThreads];
     int expectedTotal = 0;
 
     // create all threads
     for (int i = 0; i < numThreads; i++) {
       all[i] = new Incrementer(region, i, i, incrementsPerThread);
@@ -202,13 +207,13 @@ public class TestAtomicOperation {
      try {
        all[i].join();
      } catch (InterruptedException e) {
+        LOG.info("Ignored", e);
      }
    }
 
    assertICV(row, fam1, qual1, expectedTotal);
    assertICV(row, fam1, qual2, expectedTotal*2);
    assertICV(row, fam2, qual3, expectedTotal*3);
-    LOG.info("testIncrementMultiThreads successfully verified that total is " +
-        expectedTotal);
+    LOG.info("testIncrementMultiThreads successfully verified that total is " + expectedTotal);
  }
 
@@ -259,6 +264,7 @@ public class TestAtomicOperation {
 
     public Incrementer(Region region,
         int threadNumber, int amount, int numIncrements) {
+      super("incrementer." + threadNumber);
       this.region = region;
       this.numIncrements = numIncrements;
       this.amount = amount;
@@ -279,8 +285,15 @@ public class TestAtomicOperation {
           // verify: Make sure we only see completed increments
           Get g = new Get(row);
           Result result = region.get(g);
-          assertEquals(Bytes.toLong(result.getValue(fam1, qual1))*2, Bytes.toLong(result.getValue(fam1, qual2)));
-          assertEquals(Bytes.toLong(result.getValue(fam1, qual1))*3, Bytes.toLong(result.getValue(fam2, qual3)));
+          if (result != null) {
+            assertTrue(result.getValue(fam1, qual1) != null);
+            assertTrue(result.getValue(fam1, qual2) != null);
+            assertEquals(Bytes.toLong(result.getValue(fam1, qual1))*2,
+              Bytes.toLong(result.getValue(fam1, qual2)));
+            assertTrue(result.getValue(fam2, qual3) != null);
+            assertEquals(Bytes.toLong(result.getValue(fam1, qual1))*3,
+              Bytes.toLong(result.getValue(fam2, qual3)));
+          }
         } catch (IOException e) {
           e.printStackTrace();
         }
@@ -358,7 +371,7 @@ public class TestAtomicOperation {
     // create 10 threads, each will alternate between adding and
     // removing a column
     int numThreads = 10;
-    int opsPerThread = 500;
+    int opsPerThread = 250;
     AtomicOperation[] all = new AtomicOperation[numThreads];
 
     AtomicLong timeStamps = new AtomicLong(0);
@@ -450,7 +463,7 @@ public class TestAtomicOperation {
     // create 10 threads, each will alternate between adding and
     // removing a column
     int numThreads = 10;
-    int opsPerThread = 500;
+    int opsPerThread = 250;
     AtomicOperation[] all = new AtomicOperation[numThreads];
 
     AtomicLong timeStamps = new AtomicLong(0);
@@ -600,7 +613,6 @@ public class TestAtomicOperation {
       for (Cell keyValue : results) {
         assertEquals("50",Bytes.toString(CellUtil.cloneValue(keyValue)));
       }
-    }
 
   private class PutThread extends TestThread {
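
The TestAtomicOperation teardown above is the piece that answers the commit message's
"Close up WALs when done". Written out as a standalone sketch (the region field and
the getStores()/getCacheConfig() accessors are as in the diff; the surrounding test
class and region setup are assumed):

    // Grab the block cache before closing the region (its stores are gone
    // afterwards), close the region, then close the region's WAL and shut
    // the cache down so no log-roller or eviction threads outlive the test.
    @After
    public void teardown() throws IOException {
      if (region != null) {
        BlockCache bc = region.getStores().get(0).getCacheConfig().getBlockCache();
        ((HRegion) region).close();
        WAL wal = ((HRegion) region).getWAL();
        if (wal != null) wal.close();
        if (bc != null) bc.shutdown();
        region = null;
      }
    }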