diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java index 86c246278db..1e4f79fd9f6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java @@ -178,19 +178,6 @@ public class Delete extends Mutation implements Comparable { return this; } - /** - * Delete all versions of all columns of the specified family. - *

- * Overrides previous calls to deleteColumn and deleteColumns for the - * specified family. - * @param family family name - * @return this for invocation chaining - * @deprecated Since 1.0.0. Use {@link #addFamily(byte[])} - */ - @Deprecated - public Delete deleteFamily(byte [] family) { - return addFamily(family); - } /** * Delete all versions of all columns of the specified family. @@ -201,26 +188,10 @@ public class Delete extends Mutation implements Comparable { * @return this for invocation chaining */ public Delete addFamily(final byte [] family) { - this.deleteFamily(family, this.ts); + this.addFamily(family, this.ts); return this; } - /** - * Delete all columns of the specified family with a timestamp less than - * or equal to the specified timestamp. - *

- * Overrides previous calls to deleteColumn and deleteColumns for the - * specified family. - * @param family family name - * @param timestamp maximum version timestamp - * @return this for invocation chaining - * @deprecated Since 1.0.0. Use {@link #addFamily(byte[], long)} - */ - @Deprecated - public Delete deleteFamily(byte [] family, long timestamp) { - return addFamily(family, timestamp); - } - /** * Delete all columns of the specified family with a timestamp less than * or equal to the specified timestamp. @@ -247,19 +218,6 @@ public class Delete extends Mutation implements Comparable { return this; } - /** - * Delete all columns of the specified family with a timestamp equal to - * the specified timestamp. - * @param family family name - * @param timestamp version timestamp - * @return this for invocation chaining - * @deprecated Since hbase-1.0.0. Use {@link #addFamilyVersion(byte[], long)} - */ - @Deprecated - public Delete deleteFamilyVersion(byte [] family, long timestamp) { - return addFamilyVersion(family, timestamp); - } - /** * Delete all columns of the specified family with a timestamp equal to * the specified timestamp. @@ -278,18 +236,6 @@ public class Delete extends Mutation implements Comparable { return this; } - /** - * Delete all versions of the specified column. - * @param family family name - * @param qualifier column qualifier - * @return this for invocation chaining - * @deprecated Since hbase-1.0.0. Use {@link #addColumns(byte[], byte[])} - */ - @Deprecated - public Delete deleteColumns(byte [] family, byte [] qualifier) { - return addColumns(family, qualifier); - } - /** * Delete all versions of the specified column. * @param family family name @@ -301,20 +247,6 @@ public class Delete extends Mutation implements Comparable { return this; } - /** - * Delete all versions of the specified column with a timestamp less than - * or equal to the specified timestamp. 
- * @param family family name - * @param qualifier column qualifier - * @param timestamp maximum version timestamp - * @return this for invocation chaining - * @deprecated Since hbase-1.0.0. Use {@link #addColumns(byte[], byte[], long)} - */ - @Deprecated - public Delete deleteColumns(byte [] family, byte [] qualifier, long timestamp) { - return addColumns(family, qualifier, timestamp); - } - /** * Delete all versions of the specified column with a timestamp less than * or equal to the specified timestamp. @@ -337,21 +269,6 @@ public class Delete extends Mutation implements Comparable { return this; } - /** - * Delete the latest version of the specified column. - * This is an expensive call in that on the server-side, it first does a - * get to find the latest versions timestamp. Then it adds a delete using - * the fetched cells timestamp. - * @param family family name - * @param qualifier column qualifier - * @return this for invocation chaining - * @deprecated Since hbase-1.0.0. Use {@link #addColumn(byte[], byte[])} - */ - @Deprecated - public Delete deleteColumn(byte [] family, byte [] qualifier) { - return addColumn(family, qualifier); - } - /** * Delete the latest version of the specified column. * This is an expensive call in that on the server-side, it first does a @@ -362,23 +279,10 @@ public class Delete extends Mutation implements Comparable { * @return this for invocation chaining */ public Delete addColumn(final byte [] family, final byte [] qualifier) { - this.deleteColumn(family, qualifier, this.ts); + this.addColumn(family, qualifier, this.ts); return this; } - /** - * Delete the specified version of the specified column. - * @param family family name - * @param qualifier column qualifier - * @param timestamp version timestamp - * @return this for invocation chaining - * @deprecated Since hbase-1.0.0. 
Use {@link #addColumn(byte[], byte[], long)} - */ - @Deprecated - public Delete deleteColumn(byte [] family, byte [] qualifier, long timestamp) { - return addColumn(family, qualifier, timestamp); - } - /** * Delete the specified version of the specified column. * @param family family name diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 15c5675be39..0d9c73b5a1f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -681,13 +681,13 @@ public final class ProtobufUtil { ts = qv.getTimestamp(); } if (deleteType == DeleteType.DELETE_ONE_VERSION) { - delete.deleteColumn(family, qualifier, ts); + delete.addColumn(family, qualifier, ts); } else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) { - delete.deleteColumns(family, qualifier, ts); + delete.addColumns(family, qualifier, ts); } else if (deleteType == DeleteType.DELETE_FAMILY_VERSION) { - delete.deleteFamilyVersion(family, ts); + delete.addFamilyVersion(family, ts); } else { - delete.deleteFamily(family, ts); + delete.addFamily(family, ts); } } } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java index e3582c1cbd2..debf2bd9b27 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java @@ -38,7 +38,7 @@ public class TestDeleteTimeStamp { long ts = 2014L; Delete delete = new Delete(ROW); delete.setTimestamp(ts); - delete.deleteColumn(FAMILY, QUALIFIER); + delete.addColumn(FAMILY, QUALIFIER); NavigableMap> familyCellmap = delete.getFamilyCellMap(); for (Entry> entry : familyCellmap.entrySet()) { for (Cell cell : 
entry.getValue()) { diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index 13a2567d00d..fa9c4ad9960 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -358,7 +358,7 @@ public class TestOperation { // produce a Delete operation Delete delete = new Delete(ROW); - delete.deleteColumn(FAMILY, QUALIFIER); + delete.addColumn(FAMILY, QUALIFIER); // get its JSON representation, and parse it json = delete.toJSON(); parsedJSON = mapper.readValue(json, HashMap.class); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java index 1515dc104c2..c9ab23c744f 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java @@ -205,7 +205,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor Set families = new TreeSet(Bytes.BYTES_COMPARATOR); for (Cell kv : deleteRow) { if (families.add(CellUtil.cloneFamily(kv))) { - delete.deleteFamily(CellUtil.cloneFamily(kv), ts); + delete.addFamily(CellUtil.cloneFamily(kv), ts); } } } else if (deleteType == DeleteType.COLUMN) { @@ -216,7 +216,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor // Making deleteColumns() calls more than once for the same cf:qualifier is not correct // Every call to deleteColumns() will add a new KV to the familymap which will finally // get written to the memstore as part of delete(). 
- delete.deleteColumns(column.family, column.qualifier, ts); + delete.addColumns(column.family, column.qualifier, ts); } } } else if (deleteType == DeleteType.VERSION) { @@ -227,7 +227,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor int noOfVersionsToDelete = 0; if (timestamp == null) { for (Cell kv : deleteRow) { - delete.deleteColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp()); + delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp()); noOfVersionsToDelete++; } } else { @@ -236,7 +236,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv)); // Only one version of particular column getting deleted. if (columns.add(column)) { - delete.deleteColumn(column.family, column.qualifier, ts); + delete.addColumn(column.family, column.qualifier, ts); noOfVersionsToDelete++; } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java index c908474f590..2a146b32b35 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java @@ -345,7 +345,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB delete = new Delete(key.get()); } delete.setCellVisibility(new CellVisibility(visibilityExps)); - delete.deleteFamily(CellUtil.cloneFamily(kv)); + delete.addFamily(CellUtil.cloneFamily(kv)); } if (delete != null) { context.write(key, delete); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java index ff1345cd615..f9223431354 100644 
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java @@ -372,9 +372,9 @@ public class RowResource extends ResourceBase { byte[][] split = KeyValue.parseColumn(column); if (rowspec.hasTimestamp()) { if (split.length == 1) { - delete.deleteFamily(split[0], rowspec.getTimestamp()); + delete.addFamily(split[0], rowspec.getTimestamp()); } else if (split.length == 2) { - delete.deleteColumns(split[0], split[1], rowspec.getTimestamp()); + delete.addColumns(split[0], split[1], rowspec.getTimestamp()); } else { return Response.status(Response.Status.BAD_REQUEST) .type(MIMETYPE_TEXT).entity("Bad request" + CRLF) @@ -382,9 +382,9 @@ public class RowResource extends ResourceBase { } } else { if (split.length == 1) { - delete.deleteFamily(split[0]); + delete.addFamily(split[0]); } else if (split.length == 2) { - delete.deleteColumns(split[0], split[1]); + delete.addColumns(split[0], split[1]); } else { return Response.status(Response.Status.BAD_REQUEST) .type(MIMETYPE_TEXT).entity("Bad request" + CRLF) @@ -550,12 +550,12 @@ public class RowResource extends ResourceBase { byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn); if (parts.length == 2) { if (parts[1].length != 0) { - delete.deleteColumns(parts[0], parts[1]); + delete.addColumns(parts[0], parts[1]); retValue = table.checkAndDelete(key, parts[0], parts[1], valueToDeleteCell.getValue(), delete); } else { // The case of empty qualifier. - delete.deleteColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY)); + delete.addColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY)); retValue = table.checkAndDelete(key, parts[0], Bytes.toBytes(StringUtils.EMPTY), valueToDeleteCell.getValue(), delete); } @@ -565,7 +565,7 @@ public class RowResource extends ResourceBase { .type(MIMETYPE_TEXT).entity("Bad request: Column incorrectly specified." 
+ CRLF) .build(); } - delete.deleteColumns(parts[0], parts[1]); + delete.addColumns(parts[0], parts[1]); if (LOG.isDebugEnabled()) { LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns " diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java index 9b688066e44..eed4f1a1e69 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java @@ -183,14 +183,14 @@ public class TestScannersWithFilters { // Delete the second qualifier from all rows and families for(byte [] ROW : ROWS_ONE) { Delete d = new Delete(ROW); - d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]); - d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]); + d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]); + d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]); table.delete(d); } for(byte [] ROW : ROWS_TWO) { Delete d = new Delete(ROW); - d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]); - d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]); + d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]); + d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]); table.delete(d); } colsPerRow -= 2; @@ -198,14 +198,14 @@ public class TestScannersWithFilters { // Delete the second rows from both groups, one column at a time for(byte [] QUALIFIER : QUALIFIERS_ONE) { Delete d = new Delete(ROWS_ONE[1]); - d.deleteColumns(FAMILIES[0], QUALIFIER); - d.deleteColumns(FAMILIES[1], QUALIFIER); + d.addColumns(FAMILIES[0], QUALIFIER); + d.addColumns(FAMILIES[1], QUALIFIER); table.delete(d); } for(byte [] QUALIFIER : QUALIFIERS_TWO) { Delete d = new Delete(ROWS_TWO[1]); - d.deleteColumns(FAMILIES[0], QUALIFIER); - d.deleteColumns(FAMILIES[1], QUALIFIER); + d.addColumns(FAMILIES[0], QUALIFIER); + d.addColumns(FAMILIES[1], QUALIFIER); table.delete(d); } numRows -= 2; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java index 6d219e19313..af64c669d61 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java @@ -157,7 +157,7 @@ public class QuotaUtil extends QuotaTableUtil { final byte[] qualifier) throws IOException { Delete delete = new Delete(rowKey); if (qualifier != null) { - delete.deleteColumns(QUOTA_FAMILY_INFO, qualifier); + delete.addColumns(QUOTA_FAMILY_INFO, qualifier); } doDelete(connection, delete); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 88c5427a0de..bc3da96cde9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -2012,8 +2012,8 @@ public class HBaseFsck extends Configured implements Closeable { private void resetSplitParent(HbckInfo hi) throws IOException { RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName()); Delete d = new Delete(hi.metaEntry.getRegionName()); - d.deleteColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER); - d.deleteColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER); + d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER); + d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER); mutations.add(d); HRegionInfo hri = new HRegionInfo(hi.metaEntry); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 6f02ab06936..ff170653c0f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -2133,7 +2133,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { for (int i = startRow; i < endRow; i++) { byte[] data = Bytes.toBytes(String.valueOf(i)); Delete delete = new Delete(data); - delete.deleteFamily(f); + delete.addFamily(f); t.delete(delete); } } @@ -3418,9 +3418,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { ts + "_random_" + rand.nextLong()); put.addColumn(cf, qual, ts, value); } else if (rand.nextDouble() < 0.8) { - del.deleteColumn(cf, qual, ts); + del.addColumn(cf, qual, ts); } else { - del.deleteColumns(cf, qual, ts); + del.addColumns(cf, qual, ts); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 7901b814432..9c9ec87ccd9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -206,7 +206,7 @@ public class TestFromClientSide { h.delete(d); d = new Delete(T1, ts+3); - d.deleteColumns(FAMILY, C0, ts+3); + d.addColumns(FAMILY, C0, ts+3); h.delete(d); Get g = new Get(T1); @@ -262,7 +262,7 @@ public class TestFromClientSide { assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN)); Delete del = new Delete(ROW); - del.deleteColumn(FAMILY, COLUMN, ts); + del.addColumn(FAMILY, COLUMN, ts); table.delete(del); get = new Get(ROW); @@ -360,7 +360,7 @@ public class TestFromClientSide { while (it.hasNext()) { Result result = it.next(); Delete delete = new Delete(result.getRow()); - delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2")); + delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2")); ht.delete(delete); count++; } @@ -933,7 +933,7 @@ public class TestFromClientSide { // Delete a storefile column 
//////////////////////////////////////////////////////////////////////////// delete = new Delete(ROWS[0]); - delete.deleteColumns(FAMILIES[6], QUALIFIERS[7]); + delete.addColumns(FAMILIES[6], QUALIFIERS[7]); ht.delete(delete); // Try to get deleted column @@ -974,7 +974,7 @@ public class TestFromClientSide { // Delete a memstore column //////////////////////////////////////////////////////////////////////////// delete = new Delete(ROWS[0]); - delete.deleteColumns(FAMILIES[6], QUALIFIERS[8]); + delete.addColumns(FAMILIES[6], QUALIFIERS[8]); ht.delete(delete); // Try to get deleted column @@ -1016,7 +1016,7 @@ public class TestFromClientSide { //////////////////////////////////////////////////////////////////////////// delete = new Delete(ROWS[0]); - delete.deleteFamily(FAMILIES[4]); + delete.addFamily(FAMILIES[4]); ht.delete(delete); // Try to get storefile column in deleted family @@ -1180,7 +1180,7 @@ public class TestFromClientSide { scanTestNull(ht, ROW, FAMILY, VALUE); Delete delete = new Delete(ROW); - delete.deleteColumns(FAMILY, null); + delete.addColumns(FAMILY, null); ht.delete(delete); Get get = new Get(ROW); @@ -1211,7 +1211,7 @@ public class TestFromClientSide { scanTestNull(ht, ROW, FAMILY, VALUE); Delete delete = new Delete(ROW); - delete.deleteColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY); + delete.addColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY); ht.delete(delete); Get get = new Get(ROW); @@ -1239,7 +1239,7 @@ public class TestFromClientSide { assertSingleResult(result, ROW, FAMILY, QUALIFIER, null); Delete delete = new Delete(ROW); - delete.deleteColumns(FAMILY, QUALIFIER); + delete.addColumns(FAMILY, QUALIFIER); ht.delete(delete); get = new Get(ROW); @@ -1440,8 +1440,8 @@ public class TestFromClientSide { // Delete a version in the memstore and a version in a storefile Delete delete = new Delete(ROW); - delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[11]); - delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[7]); + delete.addColumn(FAMILY, QUALIFIER, 
STAMPS[11]); + delete.addColumn(FAMILY, QUALIFIER, STAMPS[7]); ht.delete(delete); // Test that it's gone @@ -1678,8 +1678,8 @@ public class TestFromClientSide { admin.flush(TABLE); Delete delete = new Delete(ROW); - delete.deleteFamilyVersion(FAMILY, ts[1]); // delete version '2000' - delete.deleteFamilyVersion(FAMILY, ts[3]); // delete version '4000' + delete.addFamilyVersion(FAMILY, ts[1]); // delete version '2000' + delete.addFamilyVersion(FAMILY, ts[3]); // delete version '4000' ht.delete(delete); admin.flush(TABLE); @@ -1733,24 +1733,24 @@ public class TestFromClientSide { // 3. delete on ROW delete = new Delete(ROW); // delete version <= 2000 of all columns - // note: deleteFamily must be the first since it will mask + // note: addFamily must be the first since it will mask // the subsequent other type deletes! - delete.deleteFamily(FAMILY, ts[1]); + delete.addFamily(FAMILY, ts[1]); // delete version '4000' of all columns - delete.deleteFamilyVersion(FAMILY, ts[3]); + delete.addFamilyVersion(FAMILY, ts[3]); // delete version <= 3000 of column 0 - delete.deleteColumns(FAMILY, QUALIFIERS[0], ts[2]); + delete.addColumns(FAMILY, QUALIFIERS[0], ts[2]); // delete version <= 5000 of column 2 - delete.deleteColumns(FAMILY, QUALIFIERS[2], ts[4]); + delete.addColumns(FAMILY, QUALIFIERS[2], ts[4]); // delete version 5000 of column 4 - delete.deleteColumn(FAMILY, QUALIFIERS[4], ts[4]); + delete.addColumn(FAMILY, QUALIFIERS[4], ts[4]); ht.delete(delete); admin.flush(TABLE); // 4. 
delete on ROWS[0] delete = new Delete(ROW2); - delete.deleteFamilyVersion(FAMILY, ts[1]); // delete version '2000' - delete.deleteFamilyVersion(FAMILY, ts[3]); // delete version '4000' + delete.addFamilyVersion(FAMILY, ts[1]); // delete version '2000' + delete.addFamilyVersion(FAMILY, ts[3]); // delete version '4000' ht.delete(delete); admin.flush(TABLE); @@ -1830,7 +1830,7 @@ public class TestFromClientSide { ht.put(put); Delete delete = new Delete(ROW); - delete.deleteFamily(FAMILIES[0], ts[0]); + delete.addFamily(FAMILIES[0], ts[0]); ht.delete(delete); Get get = new Get(ROW); @@ -1862,7 +1862,7 @@ public class TestFromClientSide { ht.put(put); delete = new Delete(ROW); - delete.deleteColumn(FAMILIES[0], QUALIFIER); // ts[4] + delete.addColumn(FAMILIES[0], QUALIFIER); // ts[4] ht.delete(delete); get = new Get(ROW); @@ -1885,12 +1885,12 @@ public class TestFromClientSide { // Test for HBASE-1847 delete = new Delete(ROW); - delete.deleteColumn(FAMILIES[0], null); + delete.addColumn(FAMILIES[0], null); ht.delete(delete); // Cleanup null qualifier delete = new Delete(ROW); - delete.deleteColumns(FAMILIES[0], null); + delete.addColumns(FAMILIES[0], null); ht.delete(delete); // Expected client behavior might be that you can re-put deleted values @@ -1958,17 +1958,17 @@ public class TestFromClientSide { result.size() == 4); delete = new Delete(ROWS[0]); - delete.deleteFamily(FAMILIES[2]); + delete.addFamily(FAMILIES[2]); ht.delete(delete); delete = new Delete(ROWS[1]); - delete.deleteColumns(FAMILIES[1], QUALIFIER); + delete.addColumns(FAMILIES[1], QUALIFIER); ht.delete(delete); delete = new Delete(ROWS[2]); - delete.deleteColumn(FAMILIES[1], QUALIFIER); - delete.deleteColumn(FAMILIES[1], QUALIFIER); - delete.deleteColumn(FAMILIES[2], QUALIFIER); + delete.addColumn(FAMILIES[1], QUALIFIER); + delete.addColumn(FAMILIES[1], QUALIFIER); + delete.addColumn(FAMILIES[2], QUALIFIER); ht.delete(delete); get = new Get(ROWS[0]); @@ -2036,7 +2036,7 @@ public class 
TestFromClientSide { // Test if we delete the family first in one row (HBASE-1541) delete = new Delete(ROWS[3]); - delete.deleteFamily(FAMILIES[1]); + delete.addFamily(FAMILIES[1]); ht.delete(delete); put = new Put(ROWS[3]); @@ -2102,7 +2102,7 @@ public class TestFromClientSide { for (int i = 0; i < 10; i++) { byte [] bytes = Bytes.toBytes(i); delete = new Delete(bytes); - delete.deleteFamily(FAMILIES[0]); + delete.addFamily(FAMILIES[0]); deletes.add(delete); } ht.delete(deletes); @@ -3467,8 +3467,8 @@ public class TestFromClientSide { // Delete a version in the memstore and a version in a storefile Delete delete = new Delete(ROW); - delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[11]); - delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[7]); + delete.addColumn(FAMILY, QUALIFIER, STAMPS[11]); + delete.addColumn(FAMILY, QUALIFIER, STAMPS[7]); ht.delete(delete); // Test that it's gone @@ -4350,7 +4350,7 @@ public class TestFromClientSide { p.addColumn(FAMILY, QUALIFIERS[1], VALUE); arm.add(p); Delete d = new Delete(ROW); - d.deleteColumns(FAMILY, QUALIFIERS[0]); + d.addColumns(FAMILY, QUALIFIERS[0]); arm.add(d); // TODO: Trying mutateRow again. The batch was failing with a one try only. 
t.mutateRow(arm); @@ -4897,7 +4897,7 @@ public class TestFromClientSide { put3.addColumn(FAMILY, QUALIFIER, value3); Delete delete = new Delete(ROW); - delete.deleteColumns(FAMILY, QUALIFIER); + delete.addColumns(FAMILY, QUALIFIER); // cell = "bbbb", using "aaaa" to compare only LESS/LESS_OR_EQUAL/NOT_EQUAL // turns out "match" @@ -5427,7 +5427,7 @@ public class TestFromClientSide { try { Delete delete = new Delete(ROW); - delete.deleteFamily(FAMILY, -1); + delete.addFamily(FAMILY, -1); table.delete(delete); fail("Negative timestamps should not have been allowed"); } catch (IllegalArgumentException ex) { @@ -5909,7 +5909,7 @@ public class TestFromClientSide { ht.put(put); scanTestNull(ht, ROW, FAMILY, VALUE, true); Delete delete = new Delete(ROW); - delete.deleteColumns(FAMILY, null); + delete.addColumns(FAMILY, null); ht.delete(delete); // Use a new table byte[] TABLE2 = Bytes.toBytes("testNull2WithReverseScan"); @@ -5922,7 +5922,7 @@ public class TestFromClientSide { TEST_UTIL.flush(); scanTestNull(ht, ROW, FAMILY, VALUE, true); delete = new Delete(ROW); - delete.deleteColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY); + delete.addColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY); ht.delete(delete); // Null value put = new Put(ROW); @@ -5951,7 +5951,7 @@ public class TestFromClientSide { ht.put(put); Delete delete = new Delete(ROW); - delete.deleteFamily(FAMILIES[0], ts[0]); + delete.addFamily(FAMILIES[0], ts[0]); ht.delete(delete); Scan scan = new Scan(ROW); @@ -5973,7 +5973,7 @@ public class TestFromClientSide { ht.put(put); delete = new Delete(ROW); - delete.deleteColumn(FAMILIES[0], QUALIFIER); // ts[4] + delete.addColumn(FAMILIES[0], QUALIFIER); // ts[4] ht.delete(delete); scan = new Scan(ROW); @@ -5986,12 +5986,12 @@ public class TestFromClientSide { // Test for HBASE-1847 delete = new Delete(ROW); - delete.deleteColumn(FAMILIES[0], null); + delete.addColumn(FAMILIES[0], null); ht.delete(delete); // Cleanup null qualifier delete = new Delete(ROW); - 
delete.deleteColumns(FAMILIES[0], null); + delete.addColumns(FAMILIES[0], null); ht.delete(delete); // Expected client behavior might be that you can re-put deleted values @@ -6038,17 +6038,17 @@ public class TestFromClientSide { ht.put(put); delete = new Delete(ROWS[0]); - delete.deleteFamily(FAMILIES[2]); + delete.addFamily(FAMILIES[2]); ht.delete(delete); delete = new Delete(ROWS[1]); - delete.deleteColumns(FAMILIES[1], QUALIFIER); + delete.addColumns(FAMILIES[1], QUALIFIER); ht.delete(delete); delete = new Delete(ROWS[2]); - delete.deleteColumn(FAMILIES[1], QUALIFIER); - delete.deleteColumn(FAMILIES[1], QUALIFIER); - delete.deleteColumn(FAMILIES[2], QUALIFIER); + delete.addColumn(FAMILIES[1], QUALIFIER); + delete.addColumn(FAMILIES[1], QUALIFIER); + delete.addColumn(FAMILIES[2], QUALIFIER); ht.delete(delete); scan = new Scan(ROWS[0]); @@ -6084,7 +6084,7 @@ public class TestFromClientSide { // Test if we delete the family first in one row (HBASE-1541) delete = new Delete(ROWS[3]); - delete.deleteFamily(FAMILIES[1]); + delete.addFamily(FAMILIES[1]); ht.delete(delete); put = new Put(ROWS[3]); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java index c83b709ccfc..59c863ef2f8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java @@ -399,7 +399,7 @@ public class TestMultiParallel { ArrayList deletes = new ArrayList(); for (int i = 0; i < KEYS.length; i++) { Delete delete = new Delete(KEYS[i]); - delete.deleteFamily(BYTES_FAMILY); + delete.addFamily(BYTES_FAMILY); deletes.add(delete); } table.delete(deletes); @@ -615,7 +615,7 @@ public class TestMultiParallel { // 3 delete Delete delete = new Delete(KEYS[20]); - delete.deleteFamily(BYTES_FAMILY); + delete.addFamily(BYTES_FAMILY); actions.add(delete); // 4 get diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java index 174b4300eed..56f01c3d90a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java @@ -512,7 +512,7 @@ public class TestMultipleTimestamps { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); - del.deleteColumn(cf, column, version); + del.addColumn(cf, column, version); ht.delete(del); } @@ -526,7 +526,7 @@ public class TestMultipleTimestamps { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); - del.deleteColumns(cf, column, version); + del.addColumns(cf, column, version); ht.delete(del); } @@ -534,14 +534,14 @@ public class TestMultipleTimestamps { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); - del.deleteColumns(cf, column); + del.addColumns(cf, column); ht.delete(del); } private void deleteFamily(Table ht, byte[] cf, int rowIdx) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); Delete del = new Delete(row); - del.deleteFamily(cf); + del.addFamily(cf); ht.delete(del); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java index 50efed964b0..60eb92f5754 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java @@ -84,7 +84,7 @@ public class TestPutDeleteEtcCellIteration { Delete d = new Delete(ROW); for (int i = 0; i < 
COUNT; i++) { byte [] bytes = Bytes.toBytes(i); - d.deleteColumn(bytes, bytes, TIMESTAMP); + d.addColumn(bytes, bytes, TIMESTAMP); } int index = 0; for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java index 54963aea538..77771ba1cf8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java @@ -135,7 +135,7 @@ public class TestScannersFromClientSide { // delete upto ts: 3 delete = new Delete(ROW); - delete.deleteFamily(FAMILY, 3); + delete.addFamily(FAMILY, 3); ht.delete(delete); // without batch diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java index 567e887e469..c80375265a7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java @@ -376,7 +376,7 @@ public class TestTimestampsFilter { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); - del.deleteColumn(cf, column, version); + del.addColumn(cf, column, version); ht.delete(del); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java index 45ba04bb008..abfadec3372 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java @@ -157,9 
+157,9 @@ public class TestRegionObserverInterface { new Boolean[] { true, true, true, true, false, false }); Delete delete = new Delete(ROW); - delete.deleteColumn(A, A); - delete.deleteColumn(B, B); - delete.deleteColumn(C, C); + delete.addColumn(A, A); + delete.addColumn(B, B); + delete.addColumn(C, C); table.delete(delete); verifyMethodResult(SimpleRegionObserver.class, @@ -194,9 +194,9 @@ public class TestRegionObserverInterface { put.addColumn(C, C, C); Delete delete = new Delete(ROW); - delete.deleteColumn(A, A); - delete.deleteColumn(B, B); - delete.deleteColumn(C, C); + delete.addColumn(A, A); + delete.addColumn(B, B); + delete.addColumn(C, C); RowMutations arm = new RowMutations(ROW); arm.add(put); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index e0a486e072c..1e89685961b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -189,14 +189,14 @@ public class TestFilter { // Delete the second qualifier from all rows and families for(byte [] ROW : ROWS_ONE) { Delete d = new Delete(ROW); - d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]); - d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]); + d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]); + d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]); this.region.delete(d); } for(byte [] ROW : ROWS_TWO) { Delete d = new Delete(ROW); - d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]); - d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]); + d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]); + d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]); this.region.delete(d); } colsPerRow -= 2; @@ -204,14 +204,14 @@ public class TestFilter { // Delete the second rows from both groups, one column at a time for(byte [] QUALIFIER : QUALIFIERS_ONE) { Delete d = new Delete(ROWS_ONE[1]); - d.deleteColumns(FAMILIES[0], QUALIFIER); - 
d.deleteColumns(FAMILIES[1], QUALIFIER); + d.addColumns(FAMILIES[0], QUALIFIER); + d.addColumns(FAMILIES[1], QUALIFIER); this.region.delete(d); } for(byte [] QUALIFIER : QUALIFIERS_TWO) { Delete d = new Delete(ROWS_TWO[1]); - d.deleteColumns(FAMILIES[0], QUALIFIER); - d.deleteColumns(FAMILIES[1], QUALIFIER); + d.addColumns(FAMILIES[0], QUALIFIER); + d.addColumns(FAMILIES[1], QUALIFIER); this.region.delete(d); } numRows -= 2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 5cf22810441..cb8b06f0368 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -312,7 +312,7 @@ public class TestImportExport { Delete d = new Delete(ROW1, now+3); t.delete(d); d = new Delete(ROW1); - d.deleteColumns(FAMILYA, QUAL, now+2); + d.addColumns(FAMILYA, QUAL, now+2); t.delete(d); String[] args = new String[] { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java index 2b96d8ccbe9..9830d64c8c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java @@ -191,7 +191,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { for (int i = 0; i < numRetries; i++) { try { Delete d = new Delete(Bytes.toBytes("KEY")); - d.deleteFamily(Bytes.toBytes(FAMILY)); + d.addFamily(Bytes.toBytes(FAMILY)); d.setCellVisibility(new CellVisibility("private&secret")); table.delete(d); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index a9841db7bb6..c17d408a617 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -102,7 +102,7 @@ public class TestWALPlayer { t1.put(p); // delete one column Delete d = new Delete(ROW); - d.deleteColumns(FAMILY, COLUMN1); + d.addColumns(FAMILY, COLUMN1); t1.delete(d); // replay the WAL, map table 1 to table 2 diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java index 67e18013b4d..c825be473bd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java @@ -254,11 +254,11 @@ public class TestMasterOperationsForRegionReplicas { Table metaTable = ADMIN.getConnection().getTable(TableName.META_TABLE_NAME); for (byte[] row : tableRows) { Delete deleteOneReplicaLocation = new Delete(row); - deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY, + deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY, MetaTableAccessor.getServerColumn(1)); - deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY, + deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY, MetaTableAccessor.getSeqNumColumn(1)); - deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY, + deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY, MetaTableAccessor.getStartCodeColumn(1)); metaTable.delete(deleteOneReplicaLocation); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java index 
7b487838e85..5ea219ac11c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java @@ -390,12 +390,12 @@ public class TestAtomicOperation { p.setDurability(Durability.ASYNC_WAL); rm.add(p); Delete d = new Delete(row); - d.deleteColumns(fam1, qual2, ts); + d.addColumns(fam1, qual2, ts); d.setDurability(Durability.ASYNC_WAL); rm.add(d); } else { Delete d = new Delete(row); - d.deleteColumns(fam1, qual1, ts); + d.addColumns(fam1, qual1, ts); d.setDurability(Durability.ASYNC_WAL); rm.add(d); Put p = new Put(row, ts); @@ -483,12 +483,12 @@ public class TestAtomicOperation { p.setDurability(Durability.ASYNC_WAL); mrm.add(p); Delete d = new Delete(row); - d.deleteColumns(fam1, qual1, ts); + d.addColumns(fam1, qual1, ts); d.setDurability(Durability.ASYNC_WAL); mrm.add(d); } else { Delete d = new Delete(row2); - d.deleteColumns(fam1, qual1, ts); + d.addColumns(fam1, qual1, ts); d.setDurability(Durability.ASYNC_WAL); mrm.add(d); Put p = new Put(row, ts); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java index b0a43b71b50..2dfbee6b850 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java @@ -181,9 +181,9 @@ public class TestBlocksRead { private void deleteFamily(String family, String row, long version) throws IOException { Delete del = new Delete(Bytes.toBytes(row)); - del.deleteFamily(Bytes.toBytes(family + "_ROWCOL"), version); - del.deleteFamily(Bytes.toBytes(family + "_ROW"), version); - del.deleteFamily(Bytes.toBytes(family + "_NONE"), version); + del.addFamily(Bytes.toBytes(family + "_ROWCOL"), version); + del.addFamily(Bytes.toBytes(family + "_ROW"), version); + 
del.addFamily(Bytes.toBytes(family + "_NONE"), version); region.delete(del); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java index 6bdda430421..b374bdc5fcf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java @@ -184,7 +184,7 @@ public class TestCompaction { for (int i = 0; i < compactionThreshold; i++) { Delete delete = new Delete(Bytes.add(STARTROW, Bytes.toBytes(i))); byte [][] famAndQf = {COLUMN_FAMILY, null}; - delete.deleteFamily(famAndQf[0]); + delete.addFamily(famAndQf[0]); r.delete(delete); } r.flush(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 9b82cc5a962..abd6d3ff815 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -1388,7 +1388,7 @@ public class TestHRegion { else break; Delete delete = new Delete(CellUtil.cloneRow(results.get(0))); - delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2")); + delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2")); r.delete(delete); results.clear(); } while (more); @@ -1690,7 +1690,7 @@ public class TestHRegion { assertFalse(res); Delete delete = new Delete(row1); - delete.deleteColumn(fam1, qf1); + delete.addColumn(fam1, qf1); res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), delete, true); assertFalse(res); @@ -1704,8 +1704,8 @@ public class TestHRegion { // checkAndDelete with correct value delete = new Delete(row1); - delete.deleteColumn(fam1, qf1); - delete.deleteColumn(fam1, qf1); + 
delete.addColumn(fam1, qf1); + delete.addColumn(fam1, qf1); res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2), delete, true); assertTrue(res); @@ -1752,7 +1752,7 @@ public class TestHRegion { // checkAndDelete with wrong value Delete delete = new Delete(row1); - delete.deleteFamily(fam1); + delete.addFamily(fam1); res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2), put, true); assertEquals(false, res); @@ -1785,7 +1785,7 @@ public class TestHRegion { // checkAndDelete with correct value Delete delete = new Delete(row1); - delete.deleteColumn(fam1, qf1); + delete.addColumn(fam1, qf1); res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1), delete, true); assertEquals(true, res); @@ -1992,9 +1992,9 @@ public class TestHRegion { // Multi-column delete Delete delete = new Delete(row1); - delete.deleteColumn(fam1, qf1); - delete.deleteColumn(fam2, qf1); - delete.deleteColumn(fam1, qf3); + delete.addColumn(fam1, qf1); + delete.addColumn(fam2, qf1); + delete.addColumn(fam1, qf3); boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator( val2), delete, true); assertEquals(true, res); @@ -2010,7 +2010,7 @@ public class TestHRegion { // Family delete delete = new Delete(row1); - delete.deleteFamily(fam2); + delete.addFamily(fam2); res = region.checkAndMutate(row1, fam2, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal), delete, true); assertEquals(true, res); @@ -2055,8 +2055,8 @@ public class TestHRegion { // We do support deleting more than 1 'latest' version Delete delete = new Delete(row1); - delete.deleteColumn(fam1, qual); - delete.deleteColumn(fam1, qual); + delete.addColumn(fam1, qual); + delete.addColumn(fam1, qual); region.delete(delete); Get get = new Get(row1); @@ -2143,7 +2143,7 @@ public class TestHRegion { // ok now delete a split: Delete delete = new Delete(row); - delete.deleteColumns(fam, splitA); + 
delete.addColumns(fam, splitA); region.delete(delete); // assert some things: @@ -2317,7 +2317,7 @@ public class TestHRegion { byte[] value = Bytes.toBytes("value"); Delete delete = new Delete(rowA); - delete.deleteFamily(fam1); + delete.addFamily(fam1); region.delete(delete); @@ -2350,14 +2350,14 @@ public class TestHRegion { @Test public void testDeleteColumns_PostInsert() throws IOException, InterruptedException { Delete delete = new Delete(row); - delete.deleteColumns(fam1, qual1); + delete.addColumns(fam1, qual1); doTestDelete_AndPostInsert(delete); } @Test - public void testDeleteFamily_PostInsert() throws IOException, InterruptedException { + public void testAddFamily_PostInsert() throws IOException, InterruptedException { Delete delete = new Delete(row); - delete.deleteFamily(fam1); + delete.addFamily(fam1); doTestDelete_AndPostInsert(delete); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java index bb72b1d8ba9..3e32772d1bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java @@ -204,7 +204,7 @@ public class TestKeepDeletes { region.put(p); Delete d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts); + d.addColumn(c0, c0, ts); region.delete(d); // scan still returns delete markers and deletes rows @@ -256,7 +256,7 @@ public class TestKeepDeletes { Delete d = new Delete(T1, ts+2); - d.deleteColumn(c0, c0, ts); + d.addColumn(c0, c0, ts); region.delete(d); // "past" get does not see rows behind delete marker @@ -335,11 +335,11 @@ public class TestKeepDeletes { region.delete(d); d = new Delete(T1, ts+2); - d.deleteColumn(c0, c0, ts+2); + d.addColumn(c0, c0, ts+2); region.delete(d); d = new Delete(T1, ts+3); - d.deleteColumns(c0, c0, ts+3); + d.addColumns(c0, c0, ts+3); region.delete(d); Scan s
= new Scan(); @@ -413,19 +413,19 @@ public class TestKeepDeletes { long ts = EnvironmentEdgeManager.currentTime(); Delete d = new Delete(T1, ts); - d.deleteColumns(c0, c0, ts); + d.addColumns(c0, c0, ts); region.delete(d); d = new Delete(T1, ts); - d.deleteFamily(c0); + d.addFamily(c0); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+1); + d.addColumn(c0, c0, ts+1); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+2); + d.addColumn(c0, c0, ts+2); region.delete(d); // 1 family marker, 1 column marker, 2 version markers @@ -466,19 +466,19 @@ public class TestKeepDeletes { // all the following deletes affect the put Delete d = new Delete(T1, ts); - d.deleteColumns(c0, c0, ts); + d.addColumns(c0, c0, ts); region.delete(d); d = new Delete(T1, ts); - d.deleteFamily(c0, ts); + d.addFamily(c0, ts); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+1); + d.addColumn(c0, c0, ts+1); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+2); + d.addColumn(c0, c0, ts+2); region.delete(d); // 1 family marker, 1 column marker, 2 version markers @@ -529,19 +529,19 @@ public class TestKeepDeletes { // all the following deletes affect the put Delete d = new Delete(T1, ts); - d.deleteColumns(c0, c0, ts); + d.addColumns(c0, c0, ts); region.delete(d); d = new Delete(T1, ts); - d.deleteFamily(c0, ts); + d.addFamily(c0, ts); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+1); + d.addColumn(c0, c0, ts+1); region.delete(d); d = new Delete(T1, ts); - d.deleteColumn(c0, c0, ts+2); + d.addColumn(c0, c0, ts+2); region.delete(d); // 1 family marker, 1 column marker, 2 version markers @@ -624,20 +624,20 @@ public class TestKeepDeletes { region.put(p); Delete d = new Delete(T1, ts+2); - d.deleteColumns(c0, c0, ts+2); + d.addColumns(c0, c0, ts+2); region.delete(d); d = new Delete(T1, ts+2); - d.deleteFamily(c1, ts+2); + d.addFamily(c1, ts+2); region.delete(d); d = new Delete(T2, ts+2); - 
d.deleteFamily(c0, ts+2); + d.addFamily(c0, ts+2); region.delete(d); // add an older delete, to make sure it is filtered d = new Delete(T1, ts-10); - d.deleteFamily(c1, ts-10); + d.addFamily(c1, ts-10); region.delete(d); // ts + 2 does NOT include the delete at ts+2 @@ -689,15 +689,15 @@ public class TestKeepDeletes { Delete d = new Delete(T1, ts); // test corner case (Put and Delete have same TS) - d.deleteColumns(c0, c0, ts); + d.addColumns(c0, c0, ts); region.delete(d); d = new Delete(T1, ts+1); - d.deleteColumn(c0, c0, ts+1); + d.addColumn(c0, c0, ts+1); region.delete(d); d = new Delete(T1, ts+3); - d.deleteColumn(c0, c0, ts+3); + d.addColumn(c0, c0, ts+3); region.delete(d); region.flush(true); @@ -842,7 +842,7 @@ public class TestKeepDeletes { region.delete(d); // and a column delete marker d = new Delete(T1, ts-2); - d.deleteColumns(c0, c0, ts-1); + d.addColumns(c0, c0, ts-1); region.delete(d); Get g = new Get(T1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index ede07935e82..3ef89ad529c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -249,7 +249,7 @@ public class TestMajorCompaction { LOG.debug("Adding deletes to memstore and flushing"); Delete delete = new Delete(secondRowBytes, System.currentTimeMillis()); byte [][] famAndQf = {COLUMN_FAMILY, null}; - delete.deleteFamily(famAndQf[0]); + delete.addFamily(famAndQf[0]); r.delete(delete); // Assert deleted. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java index 261c007d8ad..1bd20c64cf5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java @@ -105,14 +105,14 @@ public class TestMinorCompaction { public void testMinorCompactionWithDeleteColumn1() throws Exception { Delete dc = new Delete(secondRowBytes); /* delete all timestamps in the column */ - dc.deleteColumns(fam2, col2); + dc.addColumns(fam2, col2); testMinorCompactionWithDelete(dc); } @Test public void testMinorCompactionWithDeleteColumn2() throws Exception { Delete dc = new Delete(secondRowBytes); - dc.deleteColumn(fam2, col2); + dc.addColumn(fam2, col2); /* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3. * we only delete the latest version. One might expect to see only * versions 1 and 2. HBase differs, and gives us 0, 1 and 2. @@ -125,14 +125,14 @@ public class TestMinorCompaction { @Test public void testMinorCompactionWithDeleteColumnFamily() throws Exception { Delete deleteCF = new Delete(secondRowBytes); - deleteCF.deleteFamily(fam2); + deleteCF.addFamily(fam2); testMinorCompactionWithDelete(deleteCF); } @Test public void testMinorCompactionWithDeleteVersion1() throws Exception { Delete deleteVersion = new Delete(secondRowBytes); - deleteVersion.deleteColumns(fam2, col2, 2); + deleteVersion.addColumns(fam2, col2, 2); /* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3. * We delete versions 0 ... 2. So, we still have one remaining. 
*/ @@ -142,7 +142,7 @@ public class TestMinorCompaction { @Test public void testMinorCompactionWithDeleteVersion2() throws Exception { Delete deleteVersion = new Delete(secondRowBytes); - deleteVersion.deleteColumn(fam2, col2, 1); + deleteVersion.addColumn(fam2, col2, 1); /* * the table has 4 versions: 0, 1, 2, and 3. * We delete 1. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java index 5224647c061..ff6f09b2645 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java @@ -209,7 +209,7 @@ public class TestMultiColumnScanner { boolean deletedSomething = false; for (long ts : TIMESTAMPS) if (rand.nextDouble() < DELETE_PROBABILITY) { - d.deleteColumns(FAMILY_BYTES, qualBytes, ts); + d.addColumns(FAMILY_BYTES, qualBytes, ts); String rowAndQual = row + "_" + qual; Long whenDeleted = lastDelTimeMap.get(rowAndQual); lastDelTimeMap.put(rowAndQual, whenDeleted == null ? 
ts diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java index 8f0cd4cfb8a..1b427542eb5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java @@ -526,7 +526,7 @@ public class TestScanner { Delete dc = new Delete(firstRowBytes); /* delete column1 of firstRow */ - dc.deleteColumns(fam1, col1); + dc.addColumns(fam1, col1); region.delete(dc); region.flush(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java index fc3735ac0f4..b31be9daecd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java @@ -323,7 +323,7 @@ public class TestSeekOptimizations { } public void delAtTimestamp(String qual, long ts) { - del.deleteColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts); + del.addColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts); logDelete(qual, ts, "at"); } @@ -336,7 +336,7 @@ public class TestSeekOptimizations { } private void delUpToTimestamp(String qual, long upToTS) { - del.deleteColumns(FAMILY_BYTES, Bytes.toBytes(qual), upToTS); + del.addColumns(FAMILY_BYTES, Bytes.toBytes(qual), upToTS); logDelete(qual, upToTS, "up to and including"); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java index c9b20d52149..29a052b9f34 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java @@ -409,7 +409,7 @@ public class TestPerTableCFReplication { Table source, Table... targets) throws Exception { Delete del = new Delete(row); - del.deleteFamily(fam); + del.addFamily(fam); source.delete(del); Get get = new Get(row); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index 7d51ef52a70..30249fded74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -153,7 +153,7 @@ public class TestReplicationSmallTests extends TestReplicationBase { } // place a version delete marker (delete last version) Delete d = new Delete(row); - d.deleteColumn(famName, row, t); + d.addColumn(famName, row, t); htable1.delete(d); get = new Get(row); @@ -175,7 +175,7 @@ public class TestReplicationSmallTests extends TestReplicationBase { // place a column delete marker d = new Delete(row); - d.deleteColumns(famName, row, t+2); + d.addColumns(famName, row, t+2); htable1.delete(d); // now *both* of the remaining version should be deleted diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 8d97915e0e0..8ecc6e367e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -935,7 +935,7 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteFamily(TEST_FAMILY); + d.addFamily(TEST_FAMILY); 
try(Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(TEST_TABLE)) { t.delete(d); @@ -968,7 +968,7 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteFamily(TEST_FAMILY); + d.addFamily(TEST_FAMILY); try(Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(TEST_TABLE);) { t.checkAndDelete(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER, @@ -1333,8 +1333,8 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteFamily(family1); - d.deleteFamily(family2); + d.addFamily(family1); + d.addFamily(family2); try (Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(tableName)) { t.delete(d); @@ -1347,7 +1347,7 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteFamily(family1); + d.addFamily(family1); try (Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(tableName)) { t.delete(d); @@ -1360,7 +1360,7 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteFamily(family2); + d.addFamily(family2); try (Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(tableName)) { t.delete(d); @@ -1528,7 +1528,7 @@ public class TestAccessController extends SecureTestUtil { @Override public Object run() throws Exception { Delete d = new Delete(TEST_ROW); - d.deleteColumn(family1, qualifier); + d.addColumn(family1, qualifier); // d.deleteFamily(family1); try (Connection conn = ConnectionFactory.createConnection(conf); Table t = conn.getTable(tableName)) { diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java index 549db3cbd7d..bbc6ad0ef4f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java @@ -327,8 +327,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW1); - d.deleteColumns(TEST_FAMILY1, TEST_Q1); - d.deleteColumns(TEST_FAMILY1, TEST_Q2); + d.addColumns(TEST_FAMILY1, TEST_Q1); + d.addColumns(TEST_FAMILY1, TEST_Q2); t.delete(d); } } @@ -350,7 +350,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW2); - d.deleteFamily(TEST_FAMILY1); + d.addFamily(TEST_FAMILY1); t.delete(d); } } @@ -522,7 +522,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW, 124L); - d.deleteColumns(TEST_FAMILY1, TEST_Q1); + d.addColumns(TEST_FAMILY1, TEST_Q1); t.delete(d); } } @@ -537,7 +537,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW); - d.deleteColumns(TEST_FAMILY1, TEST_Q2, 124L); + d.addColumns(TEST_FAMILY1, TEST_Q2, 124L); t.delete(d); } } @@ -616,9 
+616,9 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW1); - d.deleteColumn(TEST_FAMILY1, TEST_Q1, 123); - d.deleteColumn(TEST_FAMILY1, TEST_Q2); - d.deleteFamilyVersion(TEST_FAMILY2, 125); + d.addColumn(TEST_FAMILY1, TEST_Q1, 123); + d.addColumn(TEST_FAMILY1, TEST_Q2); + d.addFamilyVersion(TEST_FAMILY2, 125); t.delete(d); } } @@ -905,7 +905,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Delete d = new Delete(TEST_ROW1); - d.deleteColumns(TEST_FAMILY1, TEST_Q1, 120); + d.addColumns(TEST_FAMILY1, TEST_Q1, 120); t.checkAndDelete(TEST_ROW1, TEST_FAMILY1, TEST_Q1, ZERO, d); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java index 7f1e7205c17..5ddfb9c83a7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java @@ -348,7 +348,7 @@ public class TestCellACLs extends SecureTestUtil { AccessTestAction deleteFamily = new AccessTestAction() { @Override public Object run() throws Exception { - Delete delete = new Delete(TEST_ROW).deleteFamily(TEST_FAMILY); + Delete delete = new Delete(TEST_ROW).addFamily(TEST_FAMILY); try(Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName())) { t.delete(delete); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index d538498b5c0..104cb5b52c5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -127,7 +127,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer for (byte[] authLabel : authLabels) { String authLabelStr = Bytes.toString(authLabel); if (currentAuths.contains(authLabelStr)) { - d.deleteColumns(LABELS_TABLE_FAMILY, authLabel); + d.addColumns(LABELS_TABLE_FAMILY, authLabel); } else { // This label is not set for the user. finalOpStatus[i] = new OperationStatus(OperationStatusCode.FAILURE, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java index c67d8692d72..52c43b2a95d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java @@ -201,7 +201,7 @@ public class TestVisibilityLabelsWithDeletes { Table table = connection.getTable(tableName)) { Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL)); - d.deleteFamilyVersion(fam, 123l); + d.addFamilyVersion(fam, 123l); table.delete(d); } catch (Throwable t) { throw new IOException(t); @@ -1799,11 +1799,11 @@ public class TestVisibilityLabelsWithDeletes { Table table = connection.getTable(tableName)) { Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); - d.deleteFamilyVersion(fam, 123l); + d.addFamilyVersion(fam, 123l); table.delete(d); d = new 
Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); - d.deleteFamilyVersion(fam, 125l); + d.addFamilyVersion(fam, 125l); table.delete(d); } catch (Throwable t) { throw new IOException(t); @@ -1856,7 +1856,7 @@ public class TestVisibilityLabelsWithDeletes { table.delete(d); d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); - d.deleteFamilyVersion(fam, 125l); + d.addFamilyVersion(fam, 125l); table.delete(d); } catch (Throwable t) { throw new IOException(t); @@ -1957,7 +1957,7 @@ public class TestVisibilityLabelsWithDeletes { d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); - d.deleteFamilyVersion(fam, 125l); + d.addFamilyVersion(fam, 125l); table.delete(d); } catch (Throwable t) { throw new IOException(t); @@ -2715,7 +2715,7 @@ public class TestVisibilityLabelsWithDeletes { try (Connection connection = ConnectionFactory.createConnection(conf); Table table = connection.getTable(tableName)) { Delete d = new Delete(row1); - d.deleteFamilyVersion(fam, 126l); + d.addFamilyVersion(fam, 126l); table.delete(d); } catch (Throwable t) { throw new IOException(t); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java index a4670718331..e28acc68274 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java @@ -216,7 +216,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase { Delete delete = new Delete(rowKey); // Delete all versions since a put // could be called multiple times if CM is used - delete.deleteColumns(cf, column); + delete.addColumns(cf, column); mutate(table, delete, rowKeyBase, rowKey, cf, column, checkedValue); buf.append(MutationType.DELETE.getNumber()); break; diff --git 
a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb index 153f07e7e9e..2535a68b4d4 100644 --- a/hbase-shell/src/main/ruby/hbase/table.rb +++ b/hbase-shell/src/main/ruby/hbase/table.rb @@ -193,7 +193,7 @@ EOF end if column family, qualifier = parse_column_name(column) - d.deleteColumns(family, qualifier, timestamp) + d.addColumns(family, qualifier, timestamp) end @table.delete(d) end diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 342c7d154c6..cc8cd29c776 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -1136,9 +1136,9 @@ public class ThriftServerRunner implements Runnable { addAttributes(delete, attributes); byte [][] famAndQf = KeyValue.parseColumn(getBytes(column)); if (famAndQf.length == 1) { - delete.deleteFamily(famAndQf[0], timestamp); + delete.addFamily(famAndQf[0], timestamp); } else { - delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp); + delete.addColumns(famAndQf[0], famAndQf[1], timestamp); } table.delete(delete); @@ -1250,9 +1250,9 @@ public class ThriftServerRunner implements Runnable { byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column)); if (m.isDelete) { if (famAndQf.length == 1) { - delete.deleteFamily(famAndQf[0], timestamp); + delete.addFamily(famAndQf[0], timestamp); } else { - delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp); + delete.addColumns(famAndQf[0], famAndQf[1], timestamp); } delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL); @@ -1310,9 +1310,9 @@ public class ThriftServerRunner implements Runnable { if (m.isDelete) { // no qualifier, family only. 
if (famAndQf.length == 1) { - delete.deleteFamily(famAndQf[0], timestamp); + delete.addFamily(famAndQf[0], timestamp); } else { - delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp); + delete.addColumns(famAndQf[0], famAndQf[1], timestamp); } delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index bef35f64c72..8811e6deffd 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -267,22 +267,22 @@ public class ThriftUtilities { if (column.isSetTimestamp()) { if (in.isSetDeleteType() && in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS)) - out.deleteColumns(column.getFamily(), column.getQualifier(), column.getTimestamp()); + out.addColumns(column.getFamily(), column.getQualifier(), column.getTimestamp()); else - out.deleteColumn(column.getFamily(), column.getQualifier(), column.getTimestamp()); + out.addColumn(column.getFamily(), column.getQualifier(), column.getTimestamp()); } else { if (in.isSetDeleteType() && in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS)) - out.deleteColumns(column.getFamily(), column.getQualifier()); + out.addColumns(column.getFamily(), column.getQualifier()); else - out.deleteColumn(column.getFamily(), column.getQualifier()); + out.addColumn(column.getFamily(), column.getQualifier()); } } else { if (column.isSetTimestamp()) { - out.deleteFamily(column.getFamily(), column.getTimestamp()); + out.addFamily(column.getFamily(), column.getTimestamp()); } else { - out.deleteFamily(column.getFamily()); + out.addFamily(column.getFamily()); } } }