From 0d17817fde20329a5153bf26cb3f3a4a7fc80b4b Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Fri, 17 Feb 2012 22:58:01 +0000
Subject: [PATCH] HBASE-3584 Rename RowMutation to RowMutations

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1245792 13f79535-47bb-0310-9956-ffa450edef68
---
 .../apache/hadoop/hbase/client/HTable.java | 2 +-
 .../hadoop/hbase/client/HTableInterface.java | 2 +-
 .../hadoop/hbase/client/RowMutation.java | 126 ------------------
 .../hadoop/hbase/client/RowMutations.java | 126 ++++++++++++++++++
 .../hbase/coprocessor/CoprocessorHost.java | 2 +-
 .../hadoop/hbase/io/HbaseObjectWritable.java | 4 +-
 .../hadoop/hbase/ipc/HRegionInterface.java | 4 +-
 .../hadoop/hbase/regionserver/HRegion.java | 4 +-
 .../hbase/regionserver/HRegionServer.java | 8 +-
 .../hbase/rest/client/RemoteHTable.java | 4 +-
 .../hbase/client/TestFromClientSide.java | 4 +-
 .../TestRegionObserverInterface.java | 2 +-
 .../regionserver/TestAtomicOperation.java | 4 +-
 13 files changed, 146 insertions(+), 146 deletions(-)
 create mode 100644 src/main/java/org/apache/hadoop/hbase/client/RowMutations.java

diff --git a/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 57605e622f7..29b8004b943 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -752,7 +752,7 @@ public class HTable implements HTableInterface {
    * {@inheritDoc}
    */
   @Override
-  public void mutateRow(final RowMutation rm) throws IOException {
+  public void mutateRow(final RowMutations rm) throws IOException {
     new ServerCallable<Void>(connection, tableName, rm.getRow(),
         operationTimeout) {
       public Void call() throws IOException {
diff --git a/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
index 784fdc27bf1..9b3c68b439d 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
@@ -272,7 +272,7 @@ public interface HTableInterface extends Closeable {
    * atomically
    * @throws IOException
   */
-  public void mutateRow(final RowMutation rm) throws IOException;
+  public void mutateRow(final RowMutations rm) throws IOException;

   /**
    * Appends values to one or more columns within a single row.
diff --git a/src/main/java/org/apache/hadoop/hbase/client/RowMutation.java b/src/main/java/org/apache/hadoop/hbase/client/RowMutation.java
index 63cd192f96c..e69de29bb2d 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/RowMutation.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/RowMutation.java
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.client;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Performs multiple mutations atomically on a single row.
- * Currently {@link Put} and {@link Delete} are supported.
- *
- * The mutations are performed in the order in which they
- * were added.
- */
-public class RowMutation implements Row {
-  private List<Mutation> mutations = new ArrayList<Mutation>();
-  private byte [] row;
-  private static final byte VERSION = (byte)0;
-
-  /** Constructor for Writable. DO NOT USE */
-  public RowMutation() {}
-
-  /**
-   * Create an atomic mutation for the specified row.
-   * @param row row key
-   */
-  public RowMutation(byte [] row) {
-    if(row == null || row.length > HConstants.MAX_ROW_LENGTH) {
-      throw new IllegalArgumentException("Row key is invalid");
-    }
-    this.row = Arrays.copyOf(row, row.length);
-  }
-
-  /**
-   * Add a {@link Put} operation to the list of mutations
-   * @param p The {@link Put} to add
-   * @throws IOException
-   */
-  public void add(Put p) throws IOException {
-    internalAdd(p);
-  }
-
-  /**
-   * Add a {@link Delete} operation to the list of mutations
-   * @param d The {@link Delete} to add
-   * @throws IOException
-   */
-  public void add(Delete d) throws IOException {
-    internalAdd(d);
-  }
-
-  private void internalAdd(Mutation m) throws IOException {
-    int res = Bytes.compareTo(this.row, m.getRow());
-    if(res != 0) {
-      throw new IOException("The row in the recently added Put/Delete " +
-        Bytes.toStringBinary(m.getRow()) + " doesn't match the original one " +
-        Bytes.toStringBinary(this.row));
-    }
-    mutations.add(m);
-  }
-
-  @Override
-  public void readFields(final DataInput in) throws IOException {
-    int version = in.readByte();
-    if (version > VERSION) {
-      throw new IOException("version not supported");
-    }
-    this.row = Bytes.readByteArray(in);
-    int numMutations = in.readInt();
-    mutations.clear();
-    for(int i = 0; i < numMutations; i++) {
-      mutations.add((Mutation) HbaseObjectWritable.readObject(in, null));
-    }
-  }
-
-  @Override
-  public void write(final DataOutput out) throws IOException {
-    out.writeByte(VERSION);
-    Bytes.writeByteArray(out, this.row);
-    out.writeInt(mutations.size());
-    for (Mutation m : mutations) {
-      HbaseObjectWritable.writeObject(out, m, m.getClass(), null);
-    }
-  }
-
-  @Override
-  public int compareTo(Row i) {
-    return Bytes.compareTo(this.getRow(), i.getRow());
-  }
-
-  @Override
-  public byte[] getRow() {
-    return row;
-  }
-
-  /**
-   * @return An unmodifiable list of the current mutations.
-   */
-  public List<Mutation> getMutations() {
-    return Collections.unmodifiableList(mutations);
-  }
-}
diff --git a/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java b/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
new file mode 100644
index 00000000000..228c7981e14
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Performs multiple mutations atomically on a single row.
+ * Currently {@link Put} and {@link Delete} are supported.
+ *
+ * The mutations are performed in the order in which they
+ * were added.
+ */
+public class RowMutations implements Row {
+  private List<Mutation> mutations = new ArrayList<Mutation>();
+  private byte [] row;
+  private static final byte VERSION = (byte)0;
+
+  /** Constructor for Writable. DO NOT USE */
+  public RowMutations() {}
+
+  /**
+   * Create an atomic mutation for the specified row.
+   * @param row row key
+   */
+  public RowMutations(byte [] row) {
+    if(row == null || row.length > HConstants.MAX_ROW_LENGTH) {
+      throw new IllegalArgumentException("Row key is invalid");
+    }
+    this.row = Arrays.copyOf(row, row.length);
+  }
+
+  /**
+   * Add a {@link Put} operation to the list of mutations
+   * @param p The {@link Put} to add
+   * @throws IOException
+   */
+  public void add(Put p) throws IOException {
+    internalAdd(p);
+  }
+
+  /**
+   * Add a {@link Delete} operation to the list of mutations
+   * @param d The {@link Delete} to add
+   * @throws IOException
+   */
+  public void add(Delete d) throws IOException {
+    internalAdd(d);
+  }
+
+  private void internalAdd(Mutation m) throws IOException {
+    int res = Bytes.compareTo(this.row, m.getRow());
+    if(res != 0) {
+      throw new IOException("The row in the recently added Put/Delete " +
+        Bytes.toStringBinary(m.getRow()) + " doesn't match the original one " +
+        Bytes.toStringBinary(this.row));
+    }
+    mutations.add(m);
+  }
+
+  @Override
+  public void readFields(final DataInput in) throws IOException {
+    int version = in.readByte();
+    if (version > VERSION) {
+      throw new IOException("version not supported");
+    }
+    this.row = Bytes.readByteArray(in);
+    int numMutations = in.readInt();
+    mutations.clear();
+    for(int i = 0; i < numMutations; i++) {
+      mutations.add((Mutation) HbaseObjectWritable.readObject(in, null));
+    }
+  }
+
+  @Override
+  public void write(final DataOutput out) throws IOException {
+    out.writeByte(VERSION);
+    Bytes.writeByteArray(out, this.row);
+    out.writeInt(mutations.size());
+    for (Mutation m : mutations) {
+      HbaseObjectWritable.writeObject(out, m, m.getClass(), null);
+    }
+  }
+
+  @Override
+  public int compareTo(Row i) {
+    return Bytes.compareTo(this.getRow(), i.getRow());
+  }
+
+  @Override
+  public byte[] getRow() {
+    return row;
+  }
+
+  /**
+   * @return An unmodifiable list of the current mutations.
+   */
+  public List<Mutation> getMutations() {
+    return Collections.unmodifiableList(mutations);
+  }
+}
diff --git a/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index b1b5a782370..b6412ca67b7 100644
--- a/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -503,7 +503,7 @@ public abstract class CoprocessorHost {
     }

     @Override
-    public void mutateRow(RowMutation rm) throws IOException {
+    public void mutateRow(RowMutations rm) throws IOException {
       table.mutateRow(rm);
     }
   }
diff --git a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
index 5c206e507e4..2f244b4534d 100644
--- a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
+++ b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
@@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.client.MultiResponse;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.coprocessor.Exec;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -257,7 +257,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur

     addToMap(Append.class, code++);

-    addToMap(RowMutation.class, code++);
+    addToMap(RowMutations.class, code++);

     addToMap(Message.class, code++);

diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java b/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
index 0431444e917..a3e766957fc 100644
--- a/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
+++ b/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
@@ -263,7 +263,7 @@ public interface HRegionInterface extends VersionedProtocol, Stoppable, Abortabl
     byte [] family, byte [] qualifier, long amount, boolean writeToWAL)
   throws IOException;

-  public void mutateRow(byte[] regionName, RowMutation rm)
+  public void mutateRow(byte[] regionName, RowMutations rm)
       throws IOException;

   /**
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 921774dcc7d..dfd29ee581a 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
@@ -4149,7 +4149,7 @@ public class HRegion implements HeapSize { // , Writable{
     return results;
   }

-  public void mutateRow(RowMutation rm) throws IOException {
+  public void mutateRow(RowMutations rm) throws IOException {
     mutateRowsWithLocks(rm.getMutations(), Collections.singleton(rm.getRow()));
   }

diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 2afe1595587..f8ba3ded2da 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.catalog.MetaReader;
 import org.apache.hadoop.hbase.catalog.RootLocationEditor;
 import org.apache.hadoop.hbase.client.Action;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -3156,7 +3156,7 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler,
   }

   @Override
-  public void mutateRow(byte[] regionName, RowMutation rm)
+  public void mutateRow(byte[] regionName, RowMutations rm)
       throws IOException {
     checkOpen();
     if (regionName == null) {
@@ -3321,8 +3321,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler,
         } else if (action instanceof Append) {
           response.add(regionName, originalIndex, append(regionName,
               (Append)action));
-        } else if (action instanceof RowMutation) {
-          mutateRow(regionName, (RowMutation)action);
+        } else if (action instanceof RowMutations) {
+          mutateRow(regionName, (RowMutations)action);
           response.add(regionName, originalIndex, new Result());
         } else {
           LOG.debug("Error: invalid Action, row must be a Get, Delete, " +
diff --git a/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 39199855e2f..8acc5266fdd 100644
--- a/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
@@ -650,7 +650,7 @@ public class RemoteHTable implements HTableInterface {
   }

   @Override
-  public void mutateRow(RowMutation rm) throws IOException {
+  public void mutateRow(RowMutations rm) throws IOException {
     throw new IOException("atomicMutation not supported");
   }
 }
diff --git a/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 2c51dc9e070..a10f31c78a4 100644
--- a/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -4078,7 +4078,7 @@ public class TestFromClientSide {
     byte [][] QUALIFIERS = new byte [][] {
       Bytes.toBytes("a"), Bytes.toBytes("b")
     };
-    RowMutation arm = new RowMutation(ROW);
+    RowMutations arm = new RowMutations(ROW);
     Put p = new Put(ROW);
     p.add(FAMILY, QUALIFIERS[0], VALUE);
     arm.add(p);
@@ -4088,7 +4088,7 @@ public class TestFromClientSide {
     Result r = t.get(g);
     assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIERS[0])));

-    arm = new RowMutation(ROW);
+    arm = new RowMutations(ROW);
     p = new Put(ROW);
     p.add(FAMILY, QUALIFIERS[1], VALUE);
     arm.add(p);
diff --git a/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 88ad22014fe..1b3b6df3d32 100644
--- a/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -151,7 +151,7 @@ public class TestRegionObserverInterface {
     delete.deleteColumn(B, B);
     delete.deleteColumn(C, C);

-    RowMutation arm = new RowMutation(ROW);
+    RowMutations arm = new RowMutations(ROW);
     arm.add(put);
     arm.add(delete);
     table.mutateRow(arm);
diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 6e81542e078..1640f296b0f 100644
--- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -280,7 +280,7 @@ public class TestAtomicOperation extends HBaseTestCase {
             region.flushcache();
           }
           long ts = timeStamps.incrementAndGet();
-          RowMutation rm = new RowMutation(row);
+          RowMutations rm = new RowMutations(row);
           if (op) {
             Put p = new Put(row, ts);
             p.add(fam1, qual1, value1);