HBASE-3584 Rename RowMutation to RowMutations

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1245792 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2012-02-17 22:58:01 +00:00
parent 9684065bec
commit 0d17817fde
13 changed files with 146 additions and 146 deletions
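
HBASE-3584 renames the client class that groups several mutations against one
row for atomic execution; only the name changes, not the behavior. A minimal
usage sketch of the API after this commit (the class and parameter names in the
sketch are illustrative, not part of the commit):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;

public class RowMutationsExample {
  // Write one column and delete another in the same row, atomically.
  static void atomicUpdate(HTable table, byte[] row, byte[] family,
      byte[] qualA, byte[] qualB, byte[] value) throws IOException {
    RowMutations rm = new RowMutations(row);  // formerly: new RowMutation(row)
    Put p = new Put(row);
    p.add(family, qualA, value);              // put one cell...
    rm.add(p);
    Delete d = new Delete(row);
    d.deleteColumn(family, qualB);            // ...and delete another
    rm.add(d);
    table.mutateRow(rm);                      // applied atomically, in add order
  }
}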

HTable.java

@@ -752,7 +752,7 @@ public class HTable implements HTableInterface {
    * {@inheritDoc}
    */
   @Override
-  public void mutateRow(final RowMutation rm) throws IOException {
+  public void mutateRow(final RowMutations rm) throws IOException {
     new ServerCallable<Void>(connection, tableName, rm.getRow(),
         operationTimeout) {
       public Void call() throws IOException {

HTableInterface.java

@@ -272,7 +272,7 @@ public interface HTableInterface extends Closeable {
   * atomically
   * @throws IOException
   */
-  public void mutateRow(final RowMutation rm) throws IOException;
+  public void mutateRow(final RowMutations rm) throws IOException;

  /**
   * Appends values to one or more columns within a single row.

RowMutation.java

@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.client;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Performs multiple mutations atomically on a single row.
- * Currently {@link Put} and {@link Delete} are supported.
- *
- * The mutations are performed in the order in which they
- * were added.
- */
-public class RowMutation implements Row {
-  private List<Mutation> mutations = new ArrayList<Mutation>();
-  private byte [] row;
-  private static final byte VERSION = (byte)0;
-
-  /** Constructor for Writable. DO NOT USE */
-  public RowMutation() {}
-
-  /**
-   * Create an atomic mutation for the specified row.
-   * @param row row key
-   */
-  public RowMutation(byte [] row) {
-    if(row == null || row.length > HConstants.MAX_ROW_LENGTH) {
-      throw new IllegalArgumentException("Row key is invalid");
-    }
-    this.row = Arrays.copyOf(row, row.length);
-  }
-
-  /**
-   * Add a {@link Put} operation to the list of mutations
-   * @param p The {@link Put} to add
-   * @throws IOException
-   */
-  public void add(Put p) throws IOException {
-    internalAdd(p);
-  }
-
-  /**
-   * Add a {@link Delete} operation to the list of mutations
-   * @param d The {@link Delete} to add
-   * @throws IOException
-   */
-  public void add(Delete d) throws IOException {
-    internalAdd(d);
-  }
-
-  private void internalAdd(Mutation m) throws IOException {
-    int res = Bytes.compareTo(this.row, m.getRow());
-    if(res != 0) {
-      throw new IOException("The row in the recently added Put/Delete " +
-          Bytes.toStringBinary(m.getRow()) + " doesn't match the original one " +
-          Bytes.toStringBinary(this.row));
-    }
-    mutations.add(m);
-  }
-
-  @Override
-  public void readFields(final DataInput in) throws IOException {
-    int version = in.readByte();
-    if (version > VERSION) {
-      throw new IOException("version not supported");
-    }
-    this.row = Bytes.readByteArray(in);
-    int numMutations = in.readInt();
-    mutations.clear();
-    for(int i = 0; i < numMutations; i++) {
-      mutations.add((Mutation) HbaseObjectWritable.readObject(in, null));
-    }
-  }
-
-  @Override
-  public void write(final DataOutput out) throws IOException {
-    out.writeByte(VERSION);
-    Bytes.writeByteArray(out, this.row);
-    out.writeInt(mutations.size());
-    for (Mutation m : mutations) {
-      HbaseObjectWritable.writeObject(out, m, m.getClass(), null);
-    }
-  }
-
-  @Override
-  public int compareTo(Row i) {
-    return Bytes.compareTo(this.getRow(), i.getRow());
-  }
-
-  @Override
-  public byte[] getRow() {
-    return row;
-  }
-
-  /**
-   * @return An unmodifiable list of the current mutations.
-   */
-  public List<Mutation> getMutations() {
-    return Collections.unmodifiableList(mutations);
-  }
-}

RowMutations.java

@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Performs multiple mutations atomically on a single row.
+ * Currently {@link Put} and {@link Delete} are supported.
+ *
+ * The mutations are performed in the order in which they
+ * were added.
+ */
+public class RowMutations implements Row {
+  private List<Mutation> mutations = new ArrayList<Mutation>();
+  private byte [] row;
+  private static final byte VERSION = (byte)0;
+
+  /** Constructor for Writable. DO NOT USE */
+  public RowMutations() {}
+
+  /**
+   * Create an atomic mutation for the specified row.
+   * @param row row key
+   */
+  public RowMutations(byte [] row) {
+    if(row == null || row.length > HConstants.MAX_ROW_LENGTH) {
+      throw new IllegalArgumentException("Row key is invalid");
+    }
+    this.row = Arrays.copyOf(row, row.length);
+  }
+
+  /**
+   * Add a {@link Put} operation to the list of mutations
+   * @param p The {@link Put} to add
+   * @throws IOException
+   */
+  public void add(Put p) throws IOException {
+    internalAdd(p);
+  }
+
+  /**
+   * Add a {@link Delete} operation to the list of mutations
+   * @param d The {@link Delete} to add
+   * @throws IOException
+   */
+  public void add(Delete d) throws IOException {
+    internalAdd(d);
+  }
+
+  private void internalAdd(Mutation m) throws IOException {
+    int res = Bytes.compareTo(this.row, m.getRow());
+    if(res != 0) {
+      throw new IOException("The row in the recently added Put/Delete " +
+          Bytes.toStringBinary(m.getRow()) + " doesn't match the original one " +
+          Bytes.toStringBinary(this.row));
+    }
+    mutations.add(m);
+  }
+
+  @Override
+  public void readFields(final DataInput in) throws IOException {
+    int version = in.readByte();
+    if (version > VERSION) {
+      throw new IOException("version not supported");
+    }
+    this.row = Bytes.readByteArray(in);
+    int numMutations = in.readInt();
+    mutations.clear();
+    for(int i = 0; i < numMutations; i++) {
+      mutations.add((Mutation) HbaseObjectWritable.readObject(in, null));
+    }
+  }
+
+  @Override
+  public void write(final DataOutput out) throws IOException {
+    out.writeByte(VERSION);
+    Bytes.writeByteArray(out, this.row);
+    out.writeInt(mutations.size());
+    for (Mutation m : mutations) {
+      HbaseObjectWritable.writeObject(out, m, m.getClass(), null);
+    }
+  }
+
+  @Override
+  public int compareTo(Row i) {
+    return Bytes.compareTo(this.getRow(), i.getRow());
+  }
+
+  @Override
+  public byte[] getRow() {
+    return row;
+  }
+
+  /**
+   * @return An unmodifiable list of the current mutations.
+   */
+  public List<Mutation> getMutations() {
+    return Collections.unmodifiableList(mutations);
+  }
+}
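
As the write()/readFields() pair above defines it, the Writable wire format is
a version byte, the row key, a mutation count, and then each mutation via
HbaseObjectWritable. A small round-trip sketch of that contract (the class and
method names here are illustrative, not part of the commit):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.client.RowMutations;

public class RowMutationsRoundTrip {
  // Serialize a RowMutations through write() and rebuild it with readFields().
  static RowMutations roundTrip(RowMutations rm) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    rm.write(new DataOutputStream(buf));     // VERSION, row, count, mutations
    RowMutations copy = new RowMutations();  // the no-arg Writable constructor
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(buf.toByteArray())));
    return copy;                             // same row and mutation list
  }
}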

CoprocessorHost.java

@@ -503,7 +503,7 @@ public abstract class CoprocessorHost<E extends CoprocessorEnvironment> {
    }

    @Override
-   public void mutateRow(RowMutation rm) throws IOException {
+   public void mutateRow(RowMutations rm) throws IOException {
      table.mutateRow(rm);
    }
  }

HbaseObjectWritable.java

@@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.client.MultiResponse;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.coprocessor.Exec;
 import org.apache.hadoop.hbase.filter.BinaryComparator;

@@ -257,7 +257,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
     addToMap(Append.class, code++);
-    addToMap(RowMutation.class, code++);
+    addToMap(RowMutations.class, code++);
     addToMap(Message.class, code++);

HRegionInterface.java

@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;

@@ -263,7 +263,7 @@ public interface HRegionInterface extends VersionedProtocol, Stoppable, Abortabl
      byte [] family, byte [] qualifier, long amount, boolean writeToWAL)
  throws IOException;

-  public void mutateRow(byte[] regionName, RowMutation rm)
+  public void mutateRow(byte[] regionName, RowMutations rm)
  throws IOException;

  /**

HRegion.java

@@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;

@@ -4149,7 +4149,7 @@ public class HRegion implements HeapSize { // , Writable{
    return results;
  }

-  public void mutateRow(RowMutation rm) throws IOException {
+  public void mutateRow(RowMutations rm) throws IOException {
    mutateRowsWithLocks(rm.getMutations(), Collections.singleton(rm.getRow()));
  }

HRegionServer.java

@@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.catalog.MetaReader;
 import org.apache.hadoop.hbase.catalog.RootLocationEditor;
 import org.apache.hadoop.hbase.client.Action;
 import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HConnectionManager;

@@ -3156,7 +3156,7 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler,
  }

  @Override
-  public void mutateRow(byte[] regionName, RowMutation rm)
+  public void mutateRow(byte[] regionName, RowMutations rm)
  throws IOException {
    checkOpen();
    if (regionName == null) {

@@ -3321,8 +3321,8 @@
        } else if (action instanceof Append) {
          response.add(regionName, originalIndex,
              append(regionName, (Append)action));
-        } else if (action instanceof RowMutation) {
-          mutateRow(regionName, (RowMutation)action);
+        } else if (action instanceof RowMutations) {
+          mutateRow(regionName, (RowMutations)action);
          response.add(regionName, originalIndex, new Result());
        } else {
          LOG.debug("Error: invalid Action, row must be a Get, Delete, " +

RemoteHTable.java

@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;

@@ -650,7 +650,7 @@ public class RemoteHTable implements HTableInterface {
  }

  @Override
-  public void mutateRow(RowMutation rm) throws IOException {
+  public void mutateRow(RowMutations rm) throws IOException {
    throw new IOException("atomicMutation not supported");
  }
}

TestFromClientSide.java

@@ -4078,7 +4078,7 @@ public class TestFromClientSide {
    byte [][] QUALIFIERS = new byte [][] {
      Bytes.toBytes("a"), Bytes.toBytes("b")
    };
-    RowMutation arm = new RowMutation(ROW);
+    RowMutations arm = new RowMutations(ROW);
    Put p = new Put(ROW);
    p.add(FAMILY, QUALIFIERS[0], VALUE);
    arm.add(p);

@@ -4088,7 +4088,7 @@
    Result r = t.get(g);
    assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIERS[0])));

-    arm = new RowMutation(ROW);
+    arm = new RowMutations(ROW);
    p = new Put(ROW);
    p.add(FAMILY, QUALIFIERS[1], VALUE);
    arm.add(p);

TestRegionObserverInterface.java

@@ -151,7 +151,7 @@ public class TestRegionObserverInterface {
    delete.deleteColumn(B, B);
    delete.deleteColumn(C, C);

-    RowMutation arm = new RowMutation(ROW);
+    RowMutations arm = new RowMutations(ROW);
    arm.add(put);
    arm.add(delete);
    table.mutateRow(arm);

TestAtomicOperation.java

@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.RowMutation;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;

@@ -280,7 +280,7 @@ public class TestAtomicOperation extends HBaseTestCase {
      region.flushcache();
    }
    long ts = timeStamps.incrementAndGet();
-    RowMutation rm = new RowMutation(row);
+    RowMutations rm = new RowMutations(row);
    if (op) {
      Put p = new Put(row, ts);
      p.add(fam1, qual1, value1);