diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 7615d6e098a..9bbe8a96fe6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -376,7 +376,7 @@ public class ClientScanner extends AbstractClientScanner { if (values != null && values.length > 0) { for (Result rs : values) { cache.add(rs); - for (Cell kv : rs.raw()) { + for (Cell kv : rs.rawCells()) { // TODO make method in Cell or CellUtil remainingResultSize -= KeyValueUtil.ensureKeyValue(kv).heapSize(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java index 9039b01826b..6ef9d11d80f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.util.Bytes; * A Result is backed by an array of {@link KeyValue} objects, each representing * an HBase cell defined by the row, family, qualifier, timestamp, and value.

* - * The underlying {@link KeyValue} objects can be accessed through the method {@link #list()}. + * The underlying {@link KeyValue} objects can be accessed through the method {@link #listCells()}. * Each KeyValue can then be accessed through * {@link KeyValue#getRow()}, {@link KeyValue#getFamily()}, {@link KeyValue#getQualifier()}, * {@link KeyValue#getTimestamp()}, and {@link KeyValue#getValue()}.

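The Result.java hunks that follow rename raw() to rawCells() and list() to listCells(), and keep deprecated raw()/list() shims that copy every Cell back into a KeyValue. A minimal caller-side sketch of the migration, assuming a plain Get against an HTable (the row, family, and qualifier names are hypothetical, not taken from the patch):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RawCellsMigrationSketch {
      // Hypothetical helper showing the 0.94 -> 0.96 accessor rename.
      static void printRow(HTable table) throws IOException {
        Get get = new Get(Bytes.toBytes("row1"));
        get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("qual"));
        Result result = table.get(get);

        // 0.94 style (now deprecated and expensive): for (KeyValue kv : result.raw()) { ... }
        Cell[] cells = result.rawCells();   // direct view of the backing Cell[]; may be null if the Result is empty
        if (cells != null) {
          for (Cell cell : cells) {
            System.out.println(Bytes.toStringBinary(CellUtil.getValueArray(cell)));
          }
        }

        // listCells() replaces list(); like the old method it returns null for an empty Result.
        List<Cell> asList = result.listCells();
        if (asList != null) {
          System.out.println("cells in row: " + asList.size());
        }
      }
    }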
@@ -85,7 +85,7 @@ public class Result implements CellScannable { public static final Result EMPTY_RESULT = new Result(); /** - * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #raw()}. + * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #rawCells()}. * Use this to represent no results if null won't do or in old 'mapred' as opposed to 'mapreduce' package * MapReduce where you need to overwrite a Result * instance with a {@link #copyFrom(Result)} call. @@ -147,20 +147,55 @@ public class Result implements CellScannable { * * @return array of Cells; can be null if nothing in the result */ - public Cell[] raw() { + public Cell[] rawCells() { return cells; } + /** + * Return the cells of a Result as an array of KeyValues. + * + * WARNING: do not use; it is expensive. Every Cell in the result is copied into a new KeyValue. + * + * Added to ease transition from 0.94 -> 0.96. + * + * @deprecated as of 0.96, use {@link #rawCells()} + * @return array of KeyValues, empty array if nothing in result. + */ + @Deprecated + public KeyValue[] raw() { + KeyValue[] kvs = new KeyValue[cells.length]; + for (int i = 0; i < kvs.length; i++) { + kvs[i] = KeyValueUtil.ensureKeyValue(cells[i]); + } + return kvs; + } + /** * Create a sorted list of the Cell's in this result. * * Since HBase 0.20.5 this is equivalent to raw(). * - * @return The sorted list of Cell's. + * @return sorted List of Cells; can be null if no cells in the result */ - public List list() { - return isEmpty()? null: Arrays.asList(raw()); + public List<Cell> listCells() { + return isEmpty()? null: Arrays.asList(rawCells()); } + + /** + * Return the cells of a Result as a sorted List of KeyValues. + * + * WARNING: do not use; it is expensive. Every Cell in the result is copied into a new KeyValue. + * + * Added to ease transition from 0.94 -> 0.96. + * + * @deprecated as of 0.96, use {@link #listCells()} + * @return sorted List of KeyValues; can be null if no cells in the result + */ + @Deprecated + public List<KeyValue> list() { + return isEmpty() ? null : Arrays.asList(raw()); + } + /** * Return the Cells for the specific column. The Cells are sorted in @@ -180,7 +215,7 @@ public class Result implements CellScannable { public List getColumn(byte [] family, byte [] qualifier) { List result = new ArrayList(); - Cell [] kvs = raw(); + Cell [] kvs = rawCells(); if (kvs == null || kvs.length == 0) { return result; @@ -275,7 +310,7 @@ public class Result implements CellScannable { * selected in the query (Get/Scan) */ public Cell getColumnLatest(byte [] family, byte [] qualifier) { - Cell [] kvs = raw(); // side effect possibly. + Cell [] kvs = rawCells(); // side effect possibly. if (kvs == null || kvs.length == 0) { return null; } @@ -306,7 +341,7 @@ public class Result implements CellScannable { public Cell getColumnLatest(byte [] family, int foffset, int flength, byte [] qualifier, int qoffset, int qlength) { - Cell [] kvs = raw(); // side effect possibly. + Cell [] kvs = rawCells(); // side effect possibly. 
if (kvs == null || kvs.length == 0) { return null; } @@ -692,8 +727,8 @@ public class Result implements CellScannable { throw new Exception("This row doesn't have the same number of KVs: " + res1.toString() + " compared to " + res2.toString()); } - Cell[] ourKVs = res1.raw(); - Cell[] replicatedKVs = res2.raw(); + Cell[] ourKVs = res1.rawCells(); + Cell[] replicatedKVs = res2.rawCells(); for (int i = 0; i < res1.size(); i++) { if (!ourKVs[i].equals(replicatedKVs[i]) || !Bytes.equals(CellUtil.getValueArray(ourKVs[i]), CellUtil.getValueArray(replicatedKVs[i]))) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index a4747ec3255..0eb3cfdf298 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -253,7 +253,7 @@ public class ScannerCallable extends RegionServerCallable { } long resultSize = 0; for (Result rr : rrs) { - for (Cell kv : rr.raw()) { + for (Cell kv : rr.rawCells()) { // TODO add getLength to Cell/use CellUtil#estimatedSizeOf resultSize += KeyValueUtil.ensureKeyValue(kv).getLength(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index a976b44b2e8..16c0a8eb66e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -1026,7 +1026,7 @@ public final class ProtobufUtil { */ public static ClientProtos.Result toResult(final Result result) { ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder(); - Cell [] cells = result.raw(); + Cell [] cells = result.rawCells(); if (cells != null) { for (Cell c : cells) { builder.addCell(toCell(c)); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index 6be129dbe2b..485891b56a2 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -154,7 +154,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool { Iterator expectedIt = simple_expected.iterator(); while (resultsIt.hasNext() && expectedIt.hasNext()) { Result r = resultsIt.next(); - for (Cell actual : r.raw()) { + for (Cell actual : r.rawCells()) { assertTrue( "Ran out of expected values prematurely!", expectedIt.hasNext()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java index a2c416adaa2..ba4eba4c615 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java @@ -248,7 +248,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase { throws IOException, InterruptedException { BytesWritable bwKey = new BytesWritable(key.get()); BytesWritable bwVal = new BytesWritable(); - for (Cell kv : value.list()) { + for (Cell kv : value.listCells()) { if (Bytes.compareTo(TEST_QUALIFIER, 0, TEST_QUALIFIER.length, kv.getQualifierArray(), 
kv.getQualifierOffset(), kv.getQualifierLength()) == 0) { context.write(bwKey, EMPTY); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java index b40de516332..ce35d9936e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java @@ -116,7 +116,7 @@ implements TableMap { ArrayList foundList = new ArrayList(); int numCols = columns.length; if (numCols > 0) { - for (Cell value: r.list()) { + for (Cell value: r.listCells()) { byte [] column = KeyValue.makeColumn(CellUtil.getFamilyArray(value), CellUtil.getQualifierArray(value)); for (int i = 0; i < numCols; i++) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java index 7df25332970..f5561ef1b62 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java @@ -116,7 +116,7 @@ public class CellCounter { context.getCounter(Counters.ROWS).increment(1); context.write(new Text("Total ROWS"), new IntWritable(1)); - for (Cell value : values.list()) { + for (Cell value : values.listCells()) { currentRowKey = Bytes.toStringBinary(CellUtil.getRowArray(value)); String thisRowFamilyName = Bytes.toStringBinary(CellUtil.getFamilyArray(value)); if (!thisRowFamilyName.equals(currentFamilyName)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java index 4e4666338c1..3a75b25150a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java @@ -109,7 +109,7 @@ extends TableMapper implements Configurable { ArrayList foundList = new ArrayList(); int numCols = columns.length; if (numCols > 0) { - for (Cell value: r.list()) { + for (Cell value: r.listCells()) { byte [] column = KeyValue.makeColumn(CellUtil.getFamilyArray(value), CellUtil.getQualifierArray(value)); for (int i = 0; i < numCols; i++) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index 6099a8bfad9..9fae177b824 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -93,7 +93,7 @@ public class Import { Context context) throws IOException { try { - for (Cell kv : value.raw()) { + for (Cell kv : value.rawCells()) { kv = filterKv(kv); // skip if we filtered it out if (kv == null) continue; @@ -143,7 +143,7 @@ public class Import { throws IOException, InterruptedException { Put put = null; Delete delete = null; - for (Cell kv : result.raw()) { + for (Cell kv : result.rawCells()) { kv = filterKv(kv); // skip if we filter it out if (kv == null) continue; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 5c8ccd215af..4d24821a78f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -3070,7 +3070,7 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa if (!results.isEmpty()) { for (Result r : results) { if (maxScannerResultSize < Long.MAX_VALUE){ - for (Cell kv : r.raw()) { + for (Cell kv : r.rawCells()) { // TODO currentScanResultSize += KeyValueUtil.ensureKeyValue(kv).heapSize(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java index fbd16e8ea30..ed796072c6a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java @@ -64,7 +64,7 @@ public class RowResultGenerator extends ResultGenerator { } Result result = table.get(get); if (result != null && !result.isEmpty()) { - valuesI = result.list().iterator(); + valuesI = result.listCells().iterator(); } } catch (DoNotRetryIOException e) { // Warn here because Stargate will return 404 in the case if multiple diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java index 75a56661e86..ebeae0d8e7f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java @@ -149,7 +149,7 @@ public class ScannerResultGenerator extends ResultGenerator { } } if (cached != null) { - rowI = cached.list().iterator(); + rowI = cached.listCells().iterator(); loop = true; cached = null; } else { @@ -162,7 +162,7 @@ public class ScannerResultGenerator extends ResultGenerator { LOG.error(StringUtils.stringifyException(e)); } if (result != null && !result.isEmpty()) { - rowI = result.list().iterator(); + rowI = result.listCells().iterator(); loop = true; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 26073f3f025..74390c9c6a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -513,7 +513,7 @@ public class AccessControlLists { byte[] entryName, Result result) { ListMultimap perms = ArrayListMultimap.create(); if (result != null && result.size() > 0) { - for (Cell kv : result.raw()) { + for (Cell kv : result.rawCells()) { Pair permissionsOfUserOnTable = parsePermissionRecord(entryName, kv); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 0b365681444..eecc9e2214e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -672,7 +672,7 @@ public class ThriftServerRunner implements Runnable { get.addColumn(family, qualifier); } Result result = table.get(get); - return ThriftUtilities.cellFromHBase(result.raw()); + return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); @@ -704,7 +704,7 @@ public 
class ThriftServerRunner implements Runnable { get.addColumn(family, qualifier); get.setMaxVersions(numVersions); Result result = table.get(get); - return ThriftUtilities.cellFromHBase(result.raw()); + return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); @@ -740,7 +740,7 @@ public class ThriftServerRunner implements Runnable { get.setTimeRange(0, timestamp); get.setMaxVersions(numVersions); Result result = table.get(get); - return ThriftUtilities.cellFromHBase(result.raw()); + return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); @@ -1371,7 +1371,7 @@ public class ThriftServerRunner implements Runnable { try { HTable table = getTable(getBytes(tableName)); Result result = table.getRowOrBefore(getBytes(row), getBytes(family)); - return ThriftUtilities.cellFromHBase(result.raw()); + return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java index 59ec052ab81..3d9c8de4d26 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java @@ -152,7 +152,7 @@ public class ThriftUtilities { result.row = ByteBuffer.wrap(result_.getRow()); if (sortColumns) { result.sortedColumns = new ArrayList(); - for (Cell kv : result_.raw()) { + for (Cell kv : result_.rawCells()) { result.sortedColumns.add(new TColumn( ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.getFamilyArray(kv), CellUtil.getQualifierArray(kv))), @@ -160,7 +160,7 @@ public class ThriftUtilities { } } else { result.columns = new TreeMap(); - for (Cell kv : result_.raw()) { + for (Cell kv : result_.rawCells()) { result.columns.put( ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.getFamilyArray(kv), CellUtil.getQualifierArray(kv))), diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 0ee8e84cefd..1a61e1b530f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -140,7 +140,7 @@ public class ThriftUtilities { * @return converted result, returns an empty result if the input is null */ public static TResult resultFromHBase(Result in) { - Cell[] raw = in.raw(); + Cell[] raw = in.rawCells(); TResult out = new TResult(); byte[] row = in.getRow(); if (row != null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index efd13908a32..c706654b1a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -2570,8 +2570,8 @@ public class HBaseFsck extends Configured implements Tool { public boolean processRow(Result result) throws IOException { try { - // record the latest modification of this hbase:meta record - long ts = Collections.max(result.list(), comp).getTimestamp(); + // record the latest modification of this META record + long ts = 
Collections.max(result.listCells(), comp).getTimestamp(); Pair pair = HRegionInfo.getHRegionInfoAndServerName(result); if (pair == null || pair.getFirst() == null) { emptyRegionInfoQualifiers.add(result); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 3e5964909a2..a73becf3bca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -535,7 +535,7 @@ public abstract class HBaseTestCase extends TestCase { return false; } values.clear(); - values.addAll(results.list()); + values.addAll(results.listCells()); return true; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java index ebe83762513..94c950bee9f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java @@ -173,7 +173,7 @@ public class TestAcidGuarantees implements Tool { msg.append("Failed after ").append(numVerified).append("!"); msg.append("Expected=").append(Bytes.toStringBinary(expected)); msg.append("Got:\n"); - for (Cell kv : res.list()) { + for (Cell kv : res.listCells()) { msg.append(kv.toString()); msg.append(" val= "); msg.append(Bytes.toStringBinary(CellUtil.getValueArray(kv))); @@ -230,7 +230,7 @@ public class TestAcidGuarantees implements Tool { msg.append("Failed after ").append(numRowsScanned).append("!"); msg.append("Expected=").append(Bytes.toStringBinary(expected)); msg.append("Got:\n"); - for (Cell kv : res.list()) { + for (Cell kv : res.listCells()) { msg.append(kv.toString()); msg.append(" val= "); msg.append(Bytes.toStringBinary(CellUtil.getValueArray(kv))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java index 13ca61589dd..5f45be3bbad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java @@ -236,7 +236,7 @@ public class TestMultiVersions { get.setTimeStamp(timestamp[j]); Result result = table.get(get); int cellCount = 0; - for(@SuppressWarnings("unused")Cell kv : result.list()) { + for(@SuppressWarnings("unused")Cell kv : result.listCells()) { cellCount++; } assertTrue(cellCount == 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java index a392ae4445f..f1bd5668370 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java @@ -107,7 +107,7 @@ public class TimestampTestBase extends HBaseTestCase { get.setMaxVersions(3); Result result = incommon.get(get); assertEquals(1, result.size()); - long time = Bytes.toLong(CellUtil.getValueArray(result.raw()[0])); + long time = Bytes.toLong(CellUtil.getValueArray(result.rawCells()[0])); assertEquals(time, currentTime); } @@ -136,7 +136,7 @@ public class TimestampTestBase extends HBaseTestCase { get.addColumn(FAMILY_NAME, QUALIFIER_NAME); get.setMaxVersions(tss.length); Result result = incommon.get(get); - Cell [] kvs = result.raw(); + Cell [] kvs = result.rawCells(); assertEquals(kvs.length, 
tss.length); for(int i=0;i(); while ((result = scanner.next()) != null) { - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { kvListScan.add(kv); } } @@ -434,7 +434,7 @@ public class TestScannersFromClientSide { return; int i = 0; - for (Cell kv : result.raw()) { + for (Cell kv : result.rawCells()) { if (i >= expKvList.size()) { break; // we will check the size later } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java index aa1da9bad28..8ffef180fe3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java @@ -149,7 +149,7 @@ public class TestTimestampsFilter { Arrays.asList(6L, 106L, 306L)); assertEquals("# of rows returned from scan", 5, results.length); for (int rowIdx = 0; rowIdx < 5; rowIdx++) { - kvs = results[rowIdx].raw(); + kvs = results[rowIdx].rawCells(); // each row should have 5 columns. // And we have requested 3 versions for each. assertEquals("Number of KeyValues in result for row:" + rowIdx, @@ -196,15 +196,15 @@ public class TestTimestampsFilter { g.addColumn(FAMILY, Bytes.toBytes("column4")); Result result = ht.get(g); - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { System.out.println("found row " + Bytes.toString(CellUtil.getRowArray(kv)) + ", column " + Bytes.toString(CellUtil.getQualifierArray(kv)) + ", value " + Bytes.toString(CellUtil.getValueArray(kv))); } - assertEquals(result.list().size(), 2); - assertTrue(CellUtil.matchingValue(result.list().get(0), Bytes.toBytes("value2-3"))); - assertTrue(CellUtil.matchingValue(result.list().get(1), Bytes.toBytes("value4-3"))); + assertEquals(result.listCells().size(), 2); + assertTrue(CellUtil.matchingValue(result.listCells().get(0), Bytes.toBytes("value2-3"))); + assertTrue(CellUtil.matchingValue(result.listCells().get(1), Bytes.toBytes("value4-3"))); ht.close(); } @@ -325,7 +325,7 @@ public class TestTimestampsFilter { get.setMaxVersions(); Result result = ht.get(get); - return result.raw(); + return result.rawCells(); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java index dbea844c74a..c0ad0917789 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java @@ -123,7 +123,7 @@ public class TestOpenTableInCoprocessor { ResultScanner results = table.getScanner(scan); int count = 0; for (Result res : results) { - count += res.list().size(); + count += res.listCells().size(); System.out.println(count + ") " + res); } results.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java index 7e9cc76b5d5..22b03cc287e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java @@ -176,7 +176,7 @@ public class TestRegionObserverScannerOpenHook { Result r = region.get(get); assertNull( "Got an 
unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " - + r, r.list()); + + r, r.listCells()); } @Test @@ -201,7 +201,7 @@ public class TestRegionObserverScannerOpenHook { Result r = region.get(get); assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " - + r, r.list()); + + r, r.listCells()); } /** @@ -262,13 +262,13 @@ public class TestRegionObserverScannerOpenHook { Result r = table.get(get); assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " - + r, r.list()); + + r, r.listCells()); get = new Get(Bytes.toBytes("anotherrow")); r = table.get(get); assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor Found: " - + r, r.list()); + + r, r.listCells()); table.close(); UTIL.shutdownMiniCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index d5af4852e94..fee298aeaf5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -166,7 +166,7 @@ public class TestRowProcessorEndpoint { Set expected = new HashSet(Arrays.asList(new String[]{"d", "e", "f", "g"})); Get get = new Get(ROW); - LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).list())); + LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells())); assertEquals(expected, result); } @@ -177,7 +177,7 @@ public class TestRowProcessorEndpoint { int numThreads = 1000; concurrentExec(new IncrementRunner(), numThreads); Get get = new Get(ROW); - LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).list())); + LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells())); int finalCounter = incrementCounter(table); assertEquals(numThreads + 1, finalCounter); assertEquals(0, failures.get()); @@ -238,11 +238,11 @@ public class TestRowProcessorEndpoint { int numThreads = 1000; concurrentExec(new SwapRowsRunner(), numThreads); LOG.debug("row keyvalues:" + - stringifyKvs(table.get(new Get(ROW)).list())); + stringifyKvs(table.get(new Get(ROW)).listCells())); LOG.debug("row2 keyvalues:" + - stringifyKvs(table.get(new Get(ROW2)).list())); - assertEquals(rowSize, table.get(new Get(ROW)).list().size()); - assertEquals(row2Size, table.get(new Get(ROW2)).list().size()); + stringifyKvs(table.get(new Get(ROW2)).listCells())); + assertEquals(rowSize, table.get(new Get(ROW)).listCells().size()); + assertEquals(row2Size, table.get(new Get(ROW2)).listCells().size()); assertEquals(0, failures.get()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index 6ec94694f6a..20135ddede7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -219,7 +219,7 @@ public class TestColumnRangeFilter { Result result; while ((result = scanner.next()) != null) { - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { results.add(kv); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java index d65a2f58793..a35d5c570e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java @@ -93,7 +93,7 @@ public class TestFilterWithScanLimits { // row2 => for (Result result : scanner) { - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { kv_number++; LOG.debug(kv_number + ". kv: " + kv); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java index 0d385930bfd..097244e0737 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java @@ -89,7 +89,7 @@ public class TestFilterWrapper { // row2 (c1-c4) and row3(c1-c4) are returned for (Result result : scanner) { row_number++; - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { LOG.debug(kv_number + ". kv: " + kv); kv_number++; assertEquals("Returned row is not correct", new String(CellUtil.getRowArray(kv)), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index f4aac94dd5b..a38e2bf20e0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -163,7 +163,7 @@ public class TestFuzzyRowAndColumnRangeFilter { Result result; long timeBeforeScan = System.currentTimeMillis(); while ((result = scanner.next()) != null) { - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { LOG.info("Got rk: " + Bytes.toStringBinary(CellUtil.getRowArray(kv)) + " cq: " + Bytes.toStringBinary(CellUtil.getQualifierArray(kv))); results.add(kv); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java index bfb845393c8..31ce15abe55 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java @@ -213,7 +213,7 @@ public class TestTableMapReduce { byte[] firstValue = null; byte[] secondValue = null; int count = 0; - for(Cell kv : r.list()) { + for(Cell kv : r.listCells()) { if (count == 0) { firstValue = CellUtil.getValueArray(kv); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java index 8183c4e97b8..6163bb9afa7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java @@ -106,7 +106,7 @@ public class TestCopyTable { Get g = new Get(Bytes.toBytes("row" + i)); Result r = t2.get(g); assertEquals(1, r.size()); - assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN1)); + assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], COLUMN1)); } t1.close(); @@ -150,7 +150,7 @@ 
public class TestCopyTable { Get g = new Get(ROW1); Result r = t2.get(g); assertEquals(1, r.size()); - assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN1)); + assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], COLUMN1)); g = new Get(ROW0); r = t2.get(g); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java index e0aac68c80d..c359137c7fa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java @@ -56,7 +56,7 @@ public class TestGroupingTableMapper { .toBytes("value1"))); keyValue.add(new KeyValue(row, Bytes.toBytes("family1"), Bytes.toBytes("clm"), Bytes .toBytes("value2"))); - when(result.list()).thenReturn(keyValue); + when(result.listCells()).thenReturn(keyValue); mapper.map(null, result, context); // template data byte[][] data = { Bytes.toBytes("value1"), Bytes.toBytes("value2") }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java index 06e579354e3..0e01a5e3245 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java @@ -427,9 +427,9 @@ public class TestHFileOutputFormat { Scan scan = new Scan(); ResultScanner results = table.getScanner(scan); for (Result res : results) { - assertEquals(FAMILIES.length, res.raw().length); - Cell first = res.raw()[0]; - for (Cell kv : res.raw()) { + assertEquals(FAMILIES.length, res.rawCells().length); + Cell first = res.rawCells()[0]; + for (Cell kv : res.rawCells()) { assertTrue(CellUtil.matchingRow(first, kv)); assertTrue(Bytes.equals(CellUtil.getValueArray(first), CellUtil.getValueArray(kv))); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 9bab8b9f89f..21b665560ce 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -286,7 +286,7 @@ public class TestImportExport { s.setRaw(true); ResultScanner scanner = t.getScanner(s); Result r = scanner.next(); - Cell[] res = r.raw(); + Cell[] res = r.rawCells(); assertTrue(CellUtil.isDeleteFamily(res[0])); assertEquals(now+4, res[1].getTimestamp()); assertEquals(now+3, res[2].getTimestamp()); @@ -467,7 +467,7 @@ public class TestImportExport { Bytes.toBytes("value")), new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"), Bytes.toBytes("value1")) }; - when(value.raw()).thenReturn(keys); + when(value.rawCells()).thenReturn(keys); importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index 15cdbb3aad0..e8d2ac93e6b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -316,7 +316,7 @@ public class TestImportTsv implements 
Configurable { ResultScanner resScanner = table.getScanner(scan); for (Result res : resScanner) { assertTrue(res.size() == 2); - List kvs = res.list(); + List kvs = res.listCells(); assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY"))); assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY"))); assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java index 003dd87b7e0..72e5d7ec32e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java @@ -213,7 +213,7 @@ public class TestMultithreadedTableMapper { byte[] firstValue = null; byte[] secondValue = null; int count = 0; - for(Cell kv : r.list()) { + for(Cell kv : r.listCells()) { if (count == 0) { firstValue = CellUtil.getValueArray(kv); }else if (count == 1) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java index 1be08b6d0b4..b47844b3a4e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java @@ -226,7 +226,7 @@ public class TestTableMapReduce { byte[] firstValue = null; byte[] secondValue = null; int count = 0; - for(Cell kv : r.list()) { + for(Cell kv : r.listCells()) { if (count == 0) { firstValue = CellUtil.getValueArray(kv); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java index f3b5ba4724a..ee5b7464e30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java @@ -110,7 +110,7 @@ public class TestTimeRangeMapRed { Context context) throws IOException { List tsList = new ArrayList(); - for (Cell kv : result.list()) { + for (Cell kv : result.listCells()) { tsList.add(kv.getTimestamp()); } @@ -196,7 +196,7 @@ public class TestTimeRangeMapRed { scan.setMaxVersions(1); ResultScanner scanner = table.getScanner(scan); for (Result r: scanner) { - for (Cell kv : r.list()) { + for (Cell kv : r.listCells()) { log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(CellUtil.getFamilyArray(kv)) + "\t" + Bytes.toString(CellUtil.getQualifierArray(kv)) + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(CellUtil.getValueArray(kv))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index 5960df08edb..64f98741696 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -124,7 +124,7 @@ public class TestWALPlayer { Get g = new Get(ROW); Result r = t2.get(g); assertEquals(1, r.size()); - assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN2)); + assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], COLUMN2)); } /** diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java index 815268f008b..80980966ecf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java @@ -174,7 +174,7 @@ public class TestAtomicOperation { Result result = region.get(get); assertEquals(1, result.size()); - Cell kv = result.raw()[0]; + Cell kv = result.rawCells()[0]; long r = Bytes.toLong(CellUtil.getValueArray(kv)); assertEquals(amount, r); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java index 0aa28dd24ef..58c4ba6dcaa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java @@ -160,7 +160,7 @@ public class TestBlocksRead extends HBaseTestCase { get.addColumn(cf, Bytes.toBytes(column)); } - kvs = region.get(get).raw(); + kvs = region.get(get).rawCells(); long blocksEnd = getBlkAccessCount(cf); if (expBlocks[i] != -1) { assertEquals("Blocks Read Check for Bloom: " + bloomType, expBlocks[i], diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 22c2dcfbfff..05840a23995 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -1066,7 +1066,7 @@ public class TestHRegion extends HBaseTestCase { Get get = new Get(row1); get.addColumn(fam2, qf1); - Cell [] actual = region.get(get).raw(); + Cell [] actual = region.get(get).rawCells(); Cell [] expected = {kv}; @@ -1386,7 +1386,7 @@ public class TestHRegion extends HBaseTestCase { Get get = new Get(row).addColumn(fam, qual); Result result = region.get(get); assertEquals(1, result.size()); - Cell kv = result.raw()[0]; + Cell kv = result.rawCells()[0]; LOG.info("Got: " + kv); assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp", kv.getTimestamp() != HConstants.LATEST_TIMESTAMP); @@ -1402,7 +1402,7 @@ public class TestHRegion extends HBaseTestCase { get = new Get(row).addColumn(fam, qual); result = region.get(get); assertEquals(1, result.size()); - kv = result.raw()[0]; + kv = result.rawCells()[0]; LOG.info("Got: " + kv); assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp", kv.getTimestamp() != HConstants.LATEST_TIMESTAMP); @@ -1656,9 +1656,9 @@ public class TestHRegion extends HBaseTestCase { Result res = region.get(get); assertEquals(expected.length, res.size()); for(int i=0; i
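Several hunks above (ClientScanner, ScannerCallable, HRegionServer) still estimate result sizes by converting each Cell back into a KeyValue, which is what their TODO comments ask to eventually avoid. A minimal sketch of that accounting loop written against the renamed accessor, assuming only the KeyValueUtil API already used in the patch:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValueUtil;
    import org.apache.hadoop.hbase.client.Result;

    public class ResultSizeEstimator {
      // Rough heap-size accounting over one Result, mirroring the scanner loops in the patch.
      // ensureKeyValue() may copy the cell, hence the TODOs about a Cell/CellUtil based size.
      static long estimateHeapSize(Result r) {
        long size = 0;
        Cell[] cells = r.rawCells();   // renamed from raw(); may be null when the Result is empty
        if (cells == null) {
          return size;
        }
        for (Cell cell : cells) {
          size += KeyValueUtil.ensureKeyValue(cell).heapSize();
        }
        return size;
      }
    }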