HBASE-5548 Add ability to get a table in the shell; BACKING OUT MISTAKEN CO-COMMIT OF HBASE-5840

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1333123 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2012-05-02 17:26:13 +00:00
parent cd8cf25386
commit 90864888ed
2 changed files with 0 additions and 57 deletions

HRegion.java

@@ -491,22 +491,6 @@ public class HRegion implements HeapSize { // , Writable{
     MonitoredTask status = TaskMonitor.get().createStatus(
         "Initializing region " + this);
-    long nextSeqId = -1;
-    try {
-      nextSeqId = initializeRegionInternals(reporter, status);
-      return nextSeqId;
-    } finally {
-      // nextSeqId will be -1 if the initialization fails.
-      // At least it will be 0 otherwise.
-      if (nextSeqId == -1) {
-        status
-            .abort("Exception during region " + this.getRegionNameAsString() + " initialization.");
-      }
-    }
-  }
-
-  private long initializeRegionInternals(final CancelableProgressable reporter, MonitoredTask status)
-      throws IOException, UnsupportedEncodingException {
     if (coprocessorHost != null) {
       status.setStatus("Running coprocessor pre-open hook");
       coprocessorHost.preOpen();

TestHRegion.java

@@ -69,7 +69,6 @@ import org.apache.hadoop.hbase.filter.NullComparator;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.master.HMaster;
-import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
@@ -88,7 +87,6 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
 import com.google.common.collect.Lists;
@@ -3461,45 +3459,6 @@ public class TestHRegion extends HBaseTestCase {
       }
     }
   }
-
-  /**
-   * Testcase to check whether the state of the region initialization task is set to ABORTED
-   * when an exception occurs during initialization.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
-    HRegionInfo info = null;
-    try {
-      FileSystem fs = Mockito.mock(FileSystem.class);
-      Mockito.when(fs.exists((Path) Mockito.anyObject())).thenThrow(new IOException());
-      HTableDescriptor htd = new HTableDescriptor(tableName);
-      htd.addFamily(new HColumnDescriptor("cf"));
-      info = new HRegionInfo(htd.getName(), HConstants.EMPTY_BYTE_ARRAY,
-          HConstants.EMPTY_BYTE_ARRAY, false);
-      Path path = new Path(DIR + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
-      // Nowhere in this test case do we instantiate an HStore, so useTableNameGlobally is null. To
-      // avoid a NullPointerException we set useTableNameGlobally to false.
-      SchemaMetrics.setUseTableNameInTest(false);
-      region = HRegion.newHRegion(path, null, fs, conf, info, htd, null);
-      // Region initialization throws an IOException and sets the task state to ABORTED.
-      region.initialize();
-      fail("Region initialization should fail due to IOException");
-    } catch (IOException io) {
-      List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
-      for (MonitoredTask monitoredTask : tasks) {
-        if (!(monitoredTask instanceof MonitoredRPCHandler)
-            && monitoredTask.getDescription().contains(region.toString())) {
-          assertTrue("Region state should be ABORTED.",
-              monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
-          break;
-        }
-      }
-    } finally {
-      HRegion.closeHRegion(region);
-    }
-  }

   private void putData(int startRow, int numRows, byte [] qf,
       byte [] ...families)