HBASE-8430 Cell decoder/scanner/etc. should not hide exceptions

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1478656 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-05-03 06:40:13 +00:00
parent d668363c23
commit b782a7202e
25 changed files with 79 additions and 71 deletions
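The pattern running through this change: cell decoders and scanners stop catching IOException internally and rethrowing it as an unchecked RuntimeException; the checked exception is instead declared on the method and propagated to the caller. A minimal sketch of the before/after shape (illustrative names, not the exact HBase code):

    // Before: the checked exception is hidden from the caller.
    public boolean advance() {
      try {
        this.current = parseCell();
      } catch (IOException e) {
        throw new RuntimeException(e); // caller cannot distinguish or recover
      }
      return this.current != null;
    }

    // After: the exception is part of the contract.
    public boolean advance() throws IOException {
      this.current = parseCell(); // IOException propagates unchanged
      return this.current != null;
    }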

View File

@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -50,7 +51,6 @@ import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.FailedLogCloseException;
import org.apache.hadoop.hbase.exceptions.HBaseIOException;
import org.apache.hadoop.hbase.exceptions.HBaseSnapshotException;
import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
import org.apache.hadoop.hbase.exceptions.NotServingRegionException;
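Most of the import churn in this and the following files is a single relocation: HBaseIOException moves from the org.apache.hadoop.hbase.exceptions package up to org.apache.hadoop.hbase, so each call site swaps one import line:

    // old location (removed)
    import org.apache.hadoop.hbase.exceptions.HBaseIOException;
    // new location (added)
    import org.apache.hadoop.hbase.HBaseIOException;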

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.exceptions.HBaseIOException;
import org.apache.hadoop.hbase.HBaseIOException;
public class WrongRowIOException extends HBaseIOException {
private static final long serialVersionUID = -5849522209440123059L;

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* Subclass if exception is not meant to be retried: e.g.

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
/**

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* This exception is thrown by the master when a region server was shut down and

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* Thrown when something happens related to region handling.

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseIOException;
/**
*

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.exceptions.HBaseIOException;
import org.apache.hadoop.hbase.HBaseIOException;
public class StoppedRpcClientException extends HBaseIOException {
public StoppedRpcClientException() {
@@ -27,4 +27,4 @@ public class StoppedRpcClientException extends HBaseIOException {
public StoppedRpcClientException(String msg) {
super(msg);
}
}
}

View File

@@ -559,10 +559,10 @@ public final class ProtobufUtil {
* @param cellScanner
* @param proto the protocol buffer Mutate to convert
* @return the converted client Append
* @throws DoNotRetryIOException
* @throws IOException
*/
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
throws DoNotRetryIOException {
throws IOException {
MutationType type = proto.getMutateType();
assert type == MutationType.APPEND : type.name();
byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
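Because the CellScanner consumed here can now fail with a checked exception, toAppend widens its throws clause from DoNotRetryIOException to plain IOException. A hypothetical caller (names assumed for illustration):

    try {
      Append append = ProtobufUtil.toAppend(proto, cellScanner);
      // ... apply the Append to the region ...
    } catch (IOException e) {
      // cell-block decoding or conversion failed; surface it to the RPC layer
    }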

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -36,7 +37,7 @@ import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestPayloadCarryingRpcController {
@Test
public void testListOfCellScannerables() {
public void testListOfCellScannerables() throws IOException {
List<CellScannable> cells = new ArrayList<CellScannable>();
final int count = 10;
for (int i = 0; i < count; i++) {

View File

@@ -18,6 +18,8 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
@@ -57,6 +59,7 @@ public interface CellScanner {
/**
* Advance the scanner 1 cell.
* @return true if the next cell is found and {@link #current()} will return a valid Cell
* @throws IOException
*/
boolean advance();
}
boolean advance() throws IOException;
}
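With advance() declaring IOException, the standard iteration loop over a CellScanner now looks like this (a sketch; `source` stands for any CellScannable):

    CellScanner scanner = source.cellScanner();
    try {
      while (scanner.advance()) {
        Cell cell = scanner.current();
        // process the cell
      }
    } catch (IOException e) {
      // the backing stream failed mid-scan; handle or rethrow
    }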

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
@@ -142,7 +143,7 @@ public final class CellUtil {
}
@Override
public boolean advance() {
public boolean advance() throws IOException {
if (this.cellScanner == null) {
if (!this.iterator.hasNext()) return false;
this.cellScanner = this.iterator.next().cellScanner();

View File

@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.exceptions;
package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
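For reference, the relocated class itself is small: after the move it lives at org.apache.hadoop.hbase.HBaseIOException and extends java.io.IOException, so existing catch (IOException e) blocks keep working. A sketch of its shape (the constructor set is assumed to follow the usual exception idiom):

    package org.apache.hadoop.hbase;

    import java.io.IOException;

    public class HBaseIOException extends IOException {
      public HBaseIOException() { super(); }
      public HBaseIOException(String message) { super(message); }
      public HBaseIOException(String message, Throwable cause) { super(message, cause); }
      public HBaseIOException(Throwable cause) { super(cause); }
    }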

View File

@@ -32,17 +32,13 @@ public abstract class BaseDecoder implements Codec.Decoder {
}
@Override
public boolean advance() {
public boolean advance() throws IOException {
if (!this.hasNext) return this.hasNext;
try {
if (this.in.available() <= 0) {
this.hasNext = false;
return this.hasNext;
}
this.current = parseCell();
} catch (IOException e) {
throw new RuntimeException(e);
if (this.in.available() <= 0) {
this.hasNext = false;
return this.hasNext;
}
this.current = parseCell();
return this.hasNext;
}
@@ -56,4 +52,4 @@ public abstract class BaseDecoder implements Codec.Decoder {
public Cell current() {
return this.current;
}
}
}
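BaseDecoder.advance() no longer needs its try/catch: parseCell() is implemented by subclasses, and any IOException it raises now flows straight out of advance(). A minimal subclass sketch under that contract (the constructor and the length-prefixed wire format are assumptions for illustration, not an HBase class):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.IOUtils;

    class LengthPrefixedDecoder extends BaseDecoder {
      LengthPrefixedDecoder(final InputStream in) {
        super(in); // assumes BaseDecoder keeps the stream in this.in
      }

      @Override
      protected Cell parseCell() throws IOException {
        // A short read or stream error here now reaches the caller of advance().
        byte[] len = new byte[Bytes.SIZEOF_INT];
        IOUtils.readFully(this.in, len, 0, len.length);
        byte[] bytes = new byte[Bytes.toInt(len)];
        IOUtils.readFully(this.in, bytes, 0, bytes.length);
        return new KeyValue(bytes);
      }
    }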

View File

@@ -42,10 +42,6 @@ public abstract class BaseEncoder implements Codec.Encoder {
public void flush() throws IOException {
if (this.flushed) return;
this.flushed = true;
try {
this.out.flush();
} catch (IOException e) {
throw new CodecException(e);
}
this.out.flush();
}
}
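The encoder side gets the same simplification: flush() (and the codec write() methods below) let the stream's IOException propagate instead of wrapping it in CodecException. A round-trip usage sketch, assuming Codec exposes getEncoder(OutputStream)/getDecoder(InputStream) factories and an enclosing method that declares throws IOException:

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Codec codec = new CellCodec();
    Codec.Encoder encoder = codec.getEncoder(out);
    encoder.write(cell);  // may now throw IOException directly
    encoder.flush();

    Codec.Decoder decoder =
        codec.getDecoder(new ByteArrayInputStream(out.toByteArray()));
    while (decoder.advance()) {  // throws IOException per the new contract
      Cell decoded = decoder.current();
      // ... use the decoded cell ...
    }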

View File

@@ -40,22 +40,18 @@ public class CellCodec implements Codec {
@Override
public void write(Cell cell) throws IOException {
checkFlushed();
try {
// Row
write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
// Column family
write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
// Qualifier
write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
// Version
this.out.write(Bytes.toBytes(cell.getTimestamp()));
// Type
this.out.write(cell.getTypeByte());
// Value
write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
} catch (IOException e) {
throw new CodecException(e);
}
// Row
write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
// Column family
write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
// Qualifier
write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
// Version
this.out.write(Bytes.toBytes(cell.getTimestamp()));
// Type
this.out.write(cell.getTypeByte());
// Value
write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
}
/**

View File

@@ -17,12 +17,16 @@
*/
package org.apache.hadoop.hbase.codec;
import java.io.IOException;
public class CodecException extends IOException {
private static final long serialVersionUID = -2850095011686914405L;
import org.apache.hadoop.hbase.HBaseIOException;
/**
* Thrown when problems in the codec whether setup or context.
*/
@SuppressWarnings("serial")
public class CodecException extends HBaseIOException {
public CodecException() {
super();
}
public CodecException(String message) {

View File

@@ -53,11 +53,7 @@ public class KeyValueCodec implements Codec {
checkFlushed();
// This is crass and will not work when KV changes. Also if passed a non-kv Cell, it will
// make expensive copy.
try {
KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
} catch (IOException e) {
throw new CodecException(e);
}
KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
}
}

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.codec.prefixtree.row.data;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
@@ -61,12 +62,16 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
@Override
public void individualSearcherAssertions(CellSearcher searcher) {
assertRowOffsetsCorrect();
assertRowOffsetsCorrect();
searcher.resetToBeforeFirstEntry();
//test first cell
searcher.advance();
try {
searcher.advance();
} catch (IOException e) {
throw new RuntimeException(e);
}
Cell first = searcher.current();
Assert.assertTrue(CellComparator.equals(d.get(0), first));

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.codec.prefixtree.row.data;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
@@ -66,7 +67,11 @@ public class TestRowDataSimple extends BaseTestRowData {
searcher.resetToBeforeFirstEntry();
// test first cell
searcher.advance();
try {
searcher.advance();
} catch (IOException e) {
throw new RuntimeException(e);
}
Cell first = searcher.current();
Assert.assertTrue(CellComparator.equals(d.get(0), first));

View File

@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.codec.BaseDecoder;
import org.apache.hadoop.hbase.codec.BaseEncoder;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.CodecException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import com.google.protobuf.ByteString;
@@ -62,11 +61,7 @@ public class MessageCodec implements Codec {
builder.setValue(ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(),
cell.getValueLength()));
HBaseProtos.Cell pbcell = builder.build();
try {
pbcell.writeDelimitedTo(this.out);
} catch (IOException e) {
throw new CodecException(e);
}
pbcell.writeDelimitedTo(this.out);
}
}

View File

@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -66,7 +67,6 @@ import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseIOException;
import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
import org.apache.hadoop.hbase.exceptions.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.exceptions.PleaseHoldException;

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
@@ -42,7 +43,7 @@ public class TestPutDeleteEtcCellIteration {
private static final int COUNT = 10;
@Test
public void testPutIteration() {
public void testPutIteration() throws IOException {
Put p = new Put(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
@@ -58,7 +59,7 @@
}
@Test (expected = ConcurrentModificationException.class)
public void testPutConcurrentModificationOnIteration() {
public void testPutConcurrentModificationOnIteration() throws IOException {
Put p = new Put(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
@@ -77,7 +78,7 @@
}
@Test
public void testDeleteIteration() {
public void testDeleteIteration() throws IOException {
Delete d = new Delete(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
@@ -93,7 +94,7 @@
}
@Test
public void testAppendIteration() {
public void testAppendIteration() throws IOException {
Append a = new Append(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
@@ -111,7 +112,7 @@
}
@Test
public void testIncrementIteration() {
public void testIncrementIteration() throws IOException {
Increment increment = new Increment(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
@@ -131,7 +132,7 @@
}
@Test
public void testResultIteration() {
public void testResultIteration() throws IOException {
Cell [] cells = new Cell[COUNT];
for(int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);

View File

@@ -113,8 +113,12 @@ public class TestIPC {
// building.
CellScanner cellScanner = pcrc.cellScanner();
List<Cell> list = new ArrayList<Cell>();
while(cellScanner.advance()) {
list.add(cellScanner.current());
try {
while(cellScanner.advance()) {
list.add(cellScanner.current());
}
} catch (IOException e) {
throw new ServiceException(e);
}
cellScanner = CellUtil.createCellScanner(list);
((PayloadCarryingRpcController)controller).setCellScanner(cellScanner);
@@ -155,7 +159,7 @@
@Test
public void testCompressCellBlock()
throws IOException, InterruptedException, SecurityException, NoSuchMethodException {
// Currently, you set
// Currently, you set
Configuration conf = HBaseConfiguration.create();
conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
TestRpcServer rpcServer = new TestRpcServer();
@@ -265,4 +269,4 @@
rpcServer.stop();
}
}
}
}

View File

@@ -37,6 +37,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseIOException;
import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
import org.apache.hadoop.hbase.exceptions.UnknownRegionException;
import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;