From fb72852f65156f72d9733128b8533345802c9142 Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Mon, 30 Sep 2013 16:52:59 +0000 Subject: [PATCH] HBASE-9612 Ability to batch edits destined to different regions git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1527648 13f79535-47bb-0310-9956-ffa450edef68 --- .../hbase/client/ClientSmallScanner.java | 1 + .../hadoop/hbase/client/HBaseAdmin.java | 1 + .../apache/hadoop/hbase/client/HTable.java | 11 +- .../hbase/client/MultiServerCallable.java | 10 +- .../hadoop/hbase/client/ScannerCallable.java | 1 + .../ipc/PayloadCarryingRpcController.java | 34 +- .../apache/hadoop/hbase/ipc/RpcClient.java | 18 +- .../hadoop/hbase/protobuf/ProtobufUtil.java | 10 +- .../hbase/protobuf/RequestConverter.java | 68 +- .../hbase/protobuf/ResponseConverter.java | 43 +- hbase-protocol/README.txt | 1 - .../protobuf/generated/ClientProtos.java | 5159 ++++++++++------- .../hbase/protobuf/generated/RPCProtos.java | 144 +- hbase-protocol/src/main/protobuf/Client.proto | 43 +- hbase-protocol/src/main/protobuf/RPC.proto | 4 +- .../AnnotationReadingPriorityFunction.java | 14 +- .../hadoop/hbase/regionserver/HRegion.java | 10 +- .../hbase/regionserver/HRegionServer.java | 228 +- .../regionserver/wal/WALEditsReplaySink.java | 18 +- .../hbase/regionserver/TestQosFunction.java | 14 +- 20 files changed, 3373 insertions(+), 2459 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java index 0b895440376..a17be55bb3e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java @@ -171,6 +171,7 @@ public class ClientSmallScanner extends ClientScanner { ScanResponse response = null; PayloadCarryingRpcController controller = new PayloadCarryingRpcController(); try { + 
controller.setPriority(getTableName()); response = getStub().scan(controller, request); return ResponseConverter.getResults(controller.cellScanner(), response); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 15f2488cd69..82d44b9059e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -637,6 +637,7 @@ public class HBaseAdmin implements Abortable, Closeable { .getServerName()); PayloadCarryingRpcController controller = new PayloadCarryingRpcController(); try { + controller.setPriority(tableName); ScanResponse response = server.scan(controller, request); values = ResponseConverter.getResults(controller.cellScanner(), response); } catch (ServiceException se) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index c49d41ba025..9c566fa2052 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -961,6 +961,8 @@ public class HTable implements HTableInterface { try { MultiRequest request = RequestConverter.buildMultiRequest( getLocation().getRegionInfo().getRegionName(), rm); + PayloadCarryingRpcController pcrc = new PayloadCarryingRpcController(); + pcrc.setPriority(tableName); getStub().multi(null, request); } catch (ServiceException se) { throw ProtobufUtil.getRemoteException(se); @@ -987,6 +989,7 @@ public class HTable implements HTableInterface { MutateRequest request = RequestConverter.buildMutateRequest( getLocation().getRegionInfo().getRegionName(), append); PayloadCarryingRpcController rpcController = new PayloadCarryingRpcController(); + rpcController.setPriority(getTableName()); MutateResponse response = 
getStub().mutate(rpcController, request); if (!response.hasResult()) return null; return ProtobufUtil.toResult(response.getResult(), rpcController.cellScanner()); @@ -1013,9 +1016,10 @@ public class HTable implements HTableInterface { try { MutateRequest request = RequestConverter.buildMutateRequest( getLocation().getRegionInfo().getRegionName(), increment); - PayloadCarryingRpcController rpcContoller = new PayloadCarryingRpcController(); - MutateResponse response = getStub().mutate(rpcContoller, request); - return ProtobufUtil.toResult(response.getResult(), rpcContoller.cellScanner()); + PayloadCarryingRpcController rpcController = new PayloadCarryingRpcController(); + rpcController.setPriority(getTableName()); + MutateResponse response = getStub().mutate(rpcController, request); + return ProtobufUtil.toResult(response.getResult(), rpcController.cellScanner()); } catch (ServiceException se) { throw ProtobufUtil.getRemoteException(se); } @@ -1074,6 +1078,7 @@ public class HTable implements HTableInterface { getLocation().getRegionInfo().getRegionName(), row, family, qualifier, amount, durability); PayloadCarryingRpcController rpcController = new PayloadCarryingRpcController(); + rpcController.setPriority(getTableName()); MutateResponse response = getStub().mutate(rpcController, request); Result result = ProtobufUtil.toResult(response.getResult(), rpcController.cellScanner()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java index 2ce61181e49..cabe5471b2a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java @@ -24,8 +24,8 @@ import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.TableName; import 
org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; @@ -84,8 +84,11 @@ class MultiServerCallable extends RegionServerCallable { multiRequest = RequestConverter.buildMultiRequest(regionName, rms); } // Carry the cells if any over the proxy/pb Service interface using the payload - // carrying rpc controller. - getStub().multi(new PayloadCarryingRpcController(cells), multiRequest); + // carrying rpc controller. Also set priority on this controller so available down + // in RpcClient when we go to craft the request header. + PayloadCarryingRpcController pcrc = new PayloadCarryingRpcController(cells); + pcrc.setPriority(getTableName()); + getStub().multi(pcrc, multiRequest); // This multi call does not return results. response.add(regionName, action.getOriginalIndex(), Result.EMPTY_RESULT); } catch (ServiceException se) { @@ -113,6 +116,7 @@ class MultiServerCallable extends RegionServerCallable { // Controller optionally carries cell data over the proxy/service boundary and also // optionally ferries cell response data back out again. 
PayloadCarryingRpcController controller = new PayloadCarryingRpcController(cells); + controller.setPriority(getTableName()); ClientProtos.MultiResponse responseProto = getStub().multi(controller, multiRequest); results = ResponseConverter.getResults(responseProto, controller.cellScanner()); } catch (ServiceException se) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 5df1771e255..3f25fcb91dc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -163,6 +163,7 @@ public class ScannerCallable extends RegionServerCallable { ScanResponse response = null; PayloadCarryingRpcController controller = new PayloadCarryingRpcController(); try { + controller.setPriority(getTableName()); response = getStub().scan(controller, request); // Client and RS maintain a nextCallSeq number during the scan. Every next() call // from client to server will increment this number in both sides. 
Client passes this diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/PayloadCarryingRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/PayloadCarryingRpcController.java index d3bc83196c5..1f9b4bb5d81 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/PayloadCarryingRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/PayloadCarryingRpcController.java @@ -23,7 +23,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; - +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -36,6 +37,15 @@ import com.google.protobuf.RpcController; */ @InterfaceAudience.Private public class PayloadCarryingRpcController implements RpcController, CellScannable { + /** + * Priority to set on this request. Set it here in controller so available composing the + * request. This is the ordained way of setting priorities going forward. We will be + * undoing the old annotation-based mechanism. + */ + // Currently only multi call makes use of this. Eventually this should be only way to set + // priority. + private int priority = 0; + // TODO: Fill out the rest of this class methods rather than return UnsupportedOperationException /** @@ -103,4 +113,26 @@ public class PayloadCarryingRpcController implements RpcController, CellScannabl public void startCancel() { throw new UnsupportedOperationException(); } + + /** + * @param priority Priority for this request; should fall roughly in the range + * {@link HConstants#NORMAL_QOS} to {@link HConstants#HIGH_QOS} + */ + public void setPriority(int priority) { + this.priority = priority; + } + + /** + * @param tn Set priority based off the table we are going against. 
+ */ + public void setPriority(final TableName tn) { + this.priority = tn != null && tn.isSystemTable()? HConstants.HIGH_QOS: HConstants.NORMAL_QOS; + } + + /** + * @return The priority of this request + */ + public int getPriority() { + return priority; + } } \ No newline at end of file diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java index 5d607a67f19..1686a65e066 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java @@ -1002,9 +1002,10 @@ public class RpcClient { * Note: this is not called from the Connection thread, but by other * threads. * @param call + * @param priority * @see #readResponse() */ - protected void writeRequest(Call call) { + protected void writeRequest(Call call, final int priority) { if (shouldCloseConnection.get()) return; try { RequestHeader.Builder builder = RequestHeader.newBuilder(); @@ -1022,6 +1023,8 @@ public class RpcClient { cellBlockBuilder.setLength(cellBlock.limit()); builder.setCellBlockMeta(cellBlockBuilder.build()); } + // Only pass priority if there one. Let zero be same as no priority. + if (priority != 0) builder.setPriority(priority); //noinspection SynchronizeOnNonFinalField RequestHeader header = builder.build(); synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC @@ -1380,6 +1383,12 @@ public class RpcClient { } } + Pair call(MethodDescriptor md, Message param, CellScanner cells, + Message returnType, User ticket, InetSocketAddress addr, int rpcTimeout) + throws InterruptedException, IOException { + return call(md, param, cells, returnType, ticket, addr, rpcTimeout, HConstants.NORMAL_QOS); + } + /** Make a call, passing param, to the IPC server running at * address which is servicing the protocol protocol, * with the ticket credentials, returning the value. 
@@ -1400,12 +1409,12 @@ public class RpcClient { */ Pair call(MethodDescriptor md, Message param, CellScanner cells, Message returnType, User ticket, InetSocketAddress addr, - int rpcTimeout) + int rpcTimeout, int priority) throws InterruptedException, IOException { Call call = new Call(md, param, cells, returnType); Connection connection = getConnection(ticket, call, addr, rpcTimeout, this.codec, this.compressor); - connection.writeRequest(call); // send the parameter + connection.writeRequest(call, priority); // send the parameter boolean interrupted = false; //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (call) { @@ -1632,7 +1641,8 @@ public class RpcClient { } Pair val = null; try { - val = call(md, param, cells, returnType, ticket, isa, rpcTimeout); + val = call(md, param, cells, returnType, ticket, isa, rpcTimeout, + pcrc != null? pcrc.getPriority(): HConstants.NORMAL_QOS); if (pcrc != null) { // Shove the results into controller so can be carried across the proxy/pb service void. if (val.getSecond() != null) pcrc.setCellScanner(val.getSecond()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 45a753ed732..bbdbd38afcf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -2257,11 +2257,11 @@ public final class ProtobufUtil { ", row=" + getStringForByteString(r.getGet().getRow()); } else if (m instanceof ClientProtos.MultiRequest) { ClientProtos.MultiRequest r = (ClientProtos.MultiRequest) m; - ClientProtos.MultiAction action = r.getActionList().get(0); - return "region= " + getStringForByteString(r.getRegion().getValue()) + - ", for " + r.getActionCount() + - " actions and 1st row key=" + getStringForByteString(action.hasMutation() ? 
- action.getMutation().getRow() : action.getGet().getRow()); + ClientProtos.RegionMutation rm = r.getRegionMutationList().get(0); + return "region= " + getStringForByteString(rm.getRegion().getValue()) + + ", for " + r.getRegionMutationCount() + + " actions and 1st row key=" + getStringForByteString(rm.getMutationCount() > 0? + rm.getMutation(0).getRow(): ByteString.EMPTY); } else if (m instanceof ClientProtos.MutateRequest) { ClientProtos.MutateRequest r = (ClientProtos.MutateRequest) m; return "region= " + getStringForByteString(r.getRegion().getValue()) + diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index 0b580c230fc..19716852939 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -63,7 +63,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiGetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; @@ -71,6 +70,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; @@ -365,10 +365,11 @@ public final class RequestConverter { * @return a multi request * @throws IOException */ - public static MultiRequest buildMultiRequest(final byte[] regionName, + public static MultiRequest buildMultiRequest(final byte [] regionName, final RowMutations rowMutations) throws IOException { - MultiRequest.Builder builder = getMultiRequestBuilderWithRegionAndAtomicSet(regionName, true); + RegionMutation.Builder builder = + getRegionMutationBuilderWithRegionAndAtomicSet(regionName, true); for (Mutation mutation: rowMutations.getMutations()) { MutationType mutateType = null; if (mutation instanceof Put) { @@ -380,9 +381,9 @@ public final class RequestConverter { mutation.getClass().getName()); } MutationProto mp = ProtobufUtil.toMutation(mutateType, mutation); - builder.addAction(MultiAction.newBuilder().setMutation(mp).build()); + builder.addMutation(mp); } - return builder.build(); + return createMultiRequest(builder.build()); } /** @@ -398,7 +399,8 @@ public final class RequestConverter { public static MultiRequest buildNoDataMultiRequest(final byte[] regionName, final RowMutations rowMutations, final List cells) throws IOException { - MultiRequest.Builder builder = getMultiRequestBuilderWithRegionAndAtomicSet(regionName, true); + RegionMutation.Builder builder = + getRegionMutationBuilderWithRegionAndAtomicSet(regionName, true); for (Mutation mutation: rowMutations.getMutations()) { MutationType type = null; if (mutation instanceof Put) { @@ -411,14 +413,18 @@ public final class RequestConverter { } MutationProto mp = ProtobufUtil.toMutationNoData(type, mutation); cells.add(mutation); - 
builder.addAction(MultiAction.newBuilder().setMutation(mp).build()); + builder.addMutation(mp); } - return builder.build(); + return createMultiRequest(builder.build()); } - private static MultiRequest.Builder getMultiRequestBuilderWithRegionAndAtomicSet(final byte [] regionName, - final boolean atomic) { - MultiRequest.Builder builder = MultiRequest.newBuilder(); + private static MultiRequest createMultiRequest(final RegionMutation rm) { + return MultiRequest.newBuilder().addRegionMutation(rm).build(); + } + + private static RegionMutation.Builder getRegionMutationBuilderWithRegionAndAtomicSet( + final byte [] regionName, final boolean atomic) { + RegionMutation.Builder builder = RegionMutation.newBuilder(); RegionSpecifier region = buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName); builder.setRegion(region); return builder.setAtomic(atomic); @@ -520,29 +526,27 @@ public final class RequestConverter { public static MultiRequest buildMultiRequest(final byte[] regionName, final List> actions) throws IOException { - MultiRequest.Builder builder = getMultiRequestBuilderWithRegionAndAtomicSet(regionName, false); + RegionMutation.Builder builder = + getRegionMutationBuilderWithRegionAndAtomicSet(regionName, false); for (Action action: actions) { - MultiAction.Builder protoAction = MultiAction.newBuilder(); Row row = action.getAction(); if (row instanceof Get) { - protoAction.setGet(ProtobufUtil.toGet((Get)row)); + throw new UnsupportedOperationException("Removed"); } else if (row instanceof Put) { - protoAction.setMutation(ProtobufUtil.toMutation(MutationType.PUT, (Put)row)); + builder.addMutation(ProtobufUtil.toMutation(MutationType.PUT, (Put)row)); } else if (row instanceof Delete) { - protoAction.setMutation(ProtobufUtil.toMutation(MutationType.DELETE, (Delete)row)); + builder.addMutation(ProtobufUtil.toMutation(MutationType.DELETE, (Delete)row)); } else if (row instanceof Append) { - 
protoAction.setMutation(ProtobufUtil.toMutation(MutationType.APPEND, (Append)row)); + builder.addMutation(ProtobufUtil.toMutation(MutationType.APPEND, (Append)row)); } else if (row instanceof Increment) { - protoAction.setMutation(ProtobufUtil.toMutation((Increment)row)); + builder.addMutation(ProtobufUtil.toMutation((Increment)row)); } else if (row instanceof RowMutations) { continue; // ignore RowMutations } else { - throw new DoNotRetryIOException( - "multi doesn't support " + row.getClass().getName()); + throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName()); } - builder.addAction(protoAction.build()); } - return builder.build(); + return createMultiRequest(builder.build()); } /** @@ -564,17 +568,16 @@ public final class RequestConverter { public static MultiRequest buildNoDataMultiRequest(final byte[] regionName, final List> actions, final List cells) throws IOException { - MultiRequest.Builder builder = getMultiRequestBuilderWithRegionAndAtomicSet(regionName, false); + RegionMutation.Builder builder = + getRegionMutationBuilderWithRegionAndAtomicSet(regionName, false); for (Action action: actions) { - MultiAction.Builder protoAction = MultiAction.newBuilder(); Row row = action.getAction(); if (row instanceof Get) { - // Gets are carried by protobufs. - protoAction.setGet(ProtobufUtil.toGet((Get)row)); + throw new UnsupportedOperationException("Removed"); } else if (row instanceof Put) { Put p = (Put)row; cells.add(p); - protoAction.setMutation(ProtobufUtil.toMutationNoData(MutationType.PUT, p)); + builder.addMutation(ProtobufUtil.toMutationNoData(MutationType.PUT, p)); } else if (row instanceof Delete) { Delete d = (Delete)row; int size = d.size(); @@ -585,26 +588,25 @@ public final class RequestConverter { // metadata only in the pb and then send the kv along the side in cells. 
if (size > 0) { cells.add(d); - protoAction.setMutation(ProtobufUtil.toMutationNoData(MutationType.DELETE, d)); + builder.addMutation(ProtobufUtil.toMutationNoData(MutationType.DELETE, d)); } else { - protoAction.setMutation(ProtobufUtil.toMutation(MutationType.DELETE, d)); + builder.addMutation(ProtobufUtil.toMutation(MutationType.DELETE, d)); } } else if (row instanceof Append) { Append a = (Append)row; cells.add(a); - protoAction.setMutation(ProtobufUtil.toMutationNoData(MutationType.APPEND, a)); + builder.addMutation(ProtobufUtil.toMutationNoData(MutationType.APPEND, a)); } else if (row instanceof Increment) { Increment i = (Increment)row; cells.add(i); - protoAction.setMutation(ProtobufUtil.toMutationNoData(MutationType.INCREMENT, i)); + builder.addMutation(ProtobufUtil.toMutationNoData(MutationType.INCREMENT, i)); } else if (row instanceof RowMutations) { continue; // ignore RowMutations } else { throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName()); } - builder.addAction(protoAction.build()); } - return builder.build(); + return createMultiRequest(builder.build()); } // End utilities for Client diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java index 29faf1a65f0..1e8cbf924b9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -38,9 +38,10 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRespo import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse; @@ -73,20 +74,20 @@ public final class ResponseConverter { * @return the results that were in the MultiResponse (a Result or an Exception). * @throws IOException */ - public static List getResults(final ClientProtos.MultiResponse proto, + public static List getResults(final MultiResponse proto, final CellScanner cells) throws IOException { List results = new ArrayList(); - List resultList = proto.getResultList(); - for (int i = 0, n = resultList.size(); i < n; i++) { - ActionResult result = resultList.get(i); - if (result.hasException()) { - results.add(ProtobufUtil.toException(result.getException())); - } else if (result.hasValue()) { - ClientProtos.Result value = result.getValue(); - results.add(ProtobufUtil.toResult(value, cells)); - } else { - results.add(new Result()); + for (RegionMutationResult result: proto.getRegionMutationResultList()) { + for (ResultOrException resultOrException: result.getResultOrExceptionList()) { + if (resultOrException.hasException()) { + results.add(ProtobufUtil.toException(resultOrException.getException())); + } else if (resultOrException.hasResult()) { + results.add(ProtobufUtil.toResult(resultOrException.getResult(), cells)); + } else { + // Just a placeholder + results.add(new Result()); + } } } return results; @@ -98,14 +99,22 @@ public final class ResponseConverter { * @param t 
* @return an action result */ - public static ActionResult buildActionResult(final Throwable t) { - ActionResult.Builder builder = ActionResult.newBuilder(); + public static ResultOrException buildActionResult(final Throwable t) { + ResultOrException.Builder builder = ResultOrException.newBuilder(); + builder.setException(buildException(t)); + return builder.build(); + } + + /** + * @param t + * @return NameBytesPair of the exception name to a stringified version of the exception. + */ + public static NameBytesPair buildException(final Throwable t) { NameBytesPair.Builder parameterBuilder = NameBytesPair.newBuilder(); parameterBuilder.setName(t.getClass().getName()); parameterBuilder.setValue( ByteString.copyFromUtf8(StringUtils.stringifyException(t))); - builder.setException(parameterBuilder.build()); - return builder.build(); + return parameterBuilder.build(); } /** diff --git a/hbase-protocol/README.txt b/hbase-protocol/README.txt index 24e2b75b608..b8e21ae0c6c 100644 --- a/hbase-protocol/README.txt +++ b/hbase-protocol/README.txt @@ -25,7 +25,6 @@ terminal and hit return -- the protoc compiler runs fast): do protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE done - ll $JAVA_DIR/org/apache/hadoop/hbase/protobuf/generated After you've done the above, check it in and then check it in (or post a patch on a JIRA with your definition file changes and the generated files). 
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java index 838c35b2777..e702defc08f 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -23667,1567 +23667,7 @@ public final class ClientProtos { // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse) } - public interface MultiActionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .MutationProto mutation = 1; - /** - * optional .MutationProto mutation = 1; - */ - boolean hasMutation(); - /** - * optional .MutationProto mutation = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(); - /** - * optional .MutationProto mutation = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); - - // optional .Get get = 2; - /** - * optional .Get get = 2; - */ - boolean hasGet(); - /** - * optional .Get get = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); - /** - * optional .Get get = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); - } - /** - * Protobuf type {@code MultiAction} - * - *
-   **
-   * An action that is part of MultiRequest.
-   * This is a union type - exactly one of the fields will be set.
-   * 
- */ - public static final class MultiAction extends - com.google.protobuf.GeneratedMessage - implements MultiActionOrBuilder { - // Use MultiAction.newBuilder() to construct. - private MultiAction(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiAction defaultInstance; - public static MultiAction getDefaultInstance() { - return defaultInstance; - } - - public MultiAction getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private MultiAction( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = mutation_.toBuilder(); - } - mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(mutation_); - mutation_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 18: { - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = get_.toBuilder(); - } - get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(get_); - get_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiAction parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiAction(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional .MutationProto mutation = 1; - public static final int MUTATION_FIELD_NUMBER = 1; - 
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_; - /** - * optional .MutationProto mutation = 1; - */ - public boolean hasMutation() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .MutationProto mutation = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { - return mutation_; - } - /** - * optional .MutationProto mutation = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { - return mutation_; - } - - // optional .Get get = 2; - public static final int GET_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; - /** - * optional .Get get = 2; - */ - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .Get get = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - return get_; - } - /** - * optional .Get get = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; - } - - private void initFields() { - mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasMutation()) { - if (!getMutation().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasGet()) { - if (!getGet().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 
0x00000001) == 0x00000001)) { - output.writeMessage(1, mutation_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, get_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, mutation_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, get_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) obj; - - boolean result = true; - result = result && (hasMutation() == other.hasMutation()); - if (hasMutation()) { - result = result && getMutation() - .equals(other.getMutation()); - } - result = result && (hasGet() == other.hasGet()); - if (hasGet()) { - result = result && getGet() - .equals(other.getGet()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMutation()) { - hash = (37 
* hash) + MUTATION_FIELD_NUMBER; - hash = (53 * hash) + getMutation().hashCode(); - } - if (hasGet()) { - hash = (37 * hash) + GET_FIELD_NUMBER; - hash = (53 * hash) + getGet().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code MultiAction} - * - *
-     **
-     * An action that is part of MultiRequest.
-     * This is a union type - exactly one of the fields will be set.
-     * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMutationFieldBuilder(); - getGetFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (mutationBuilder_ == null) { - mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - } else { - mutationBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - 
getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiAction_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (mutationBuilder_ == null) { - result.mutation_ = mutation_; - } else { - result.mutation_ = mutationBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (getBuilder_ == null) { - result.get_ = get_; - } else { - result.get_ = getBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()) return this; - if 
(other.hasMutation()) { - mergeMutation(other.getMutation()); - } - if (other.hasGet()) { - mergeGet(other.getGet()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasMutation()) { - if (!getMutation().isInitialized()) { - - return false; - } - } - if (hasGet()) { - if (!getGet().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional .MutationProto mutation = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; - /** - * optional .MutationProto mutation = 1; - */ - public boolean hasMutation() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .MutationProto mutation = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { - if (mutationBuilder_ == null) { - return mutation_; - } else { - return 
mutationBuilder_.getMessage(); - } - } - /** - * optional .MutationProto mutation = 1; - */ - public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { - if (mutationBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - mutation_ = value; - onChanged(); - } else { - mutationBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .MutationProto mutation = 1; - */ - public Builder setMutation( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { - if (mutationBuilder_ == null) { - mutation_ = builderForValue.build(); - onChanged(); - } else { - mutationBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .MutationProto mutation = 1; - */ - public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { - if (mutationBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) { - mutation_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial(); - } else { - mutation_ = value; - } - onChanged(); - } else { - mutationBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .MutationProto mutation = 1; - */ - public Builder clearMutation() { - if (mutationBuilder_ == null) { - mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); - onChanged(); - } else { - mutationBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * optional .MutationProto mutation = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() { - bitField0_ |= 
0x00000001; - onChanged(); - return getMutationFieldBuilder().getBuilder(); - } - /** - * optional .MutationProto mutation = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { - if (mutationBuilder_ != null) { - return mutationBuilder_.getMessageOrBuilder(); - } else { - return mutation_; - } - } - /** - * optional .MutationProto mutation = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> - getMutationFieldBuilder() { - if (mutationBuilder_ == null) { - mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( - mutation_, - getParentForChildren(), - isClean()); - mutation_ = null; - } - return mutationBuilder_; - } - - // optional .Get get = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; - /** - * optional .Get get = 2; - */ - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .Get get = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { - if (getBuilder_ == null) { - return get_; - } else { - return getBuilder_.getMessage(); - } - } - /** - * optional .Get get = 2; - 
*/ - public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - get_ = value; - onChanged(); - } else { - getBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .Get get = 2; - */ - public Builder setGet( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { - if (getBuilder_ == null) { - get_ = builderForValue.build(); - onChanged(); - } else { - getBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .Get get = 2; - */ - public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { - if (getBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { - get_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); - } else { - get_ = value; - } - onChanged(); - } else { - getBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .Get get = 2; - */ - public Builder clearGet() { - if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); - onChanged(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * optional .Get get = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getGetFieldBuilder().getBuilder(); - } - /** - * optional .Get get = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { - if (getBuilder_ != null) { - return getBuilder_.getMessageOrBuilder(); - } else { - return get_; - } - } - 
/** - * optional .Get get = 2; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> - getGetFieldBuilder() { - if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( - get_, - getParentForChildren(), - isClean()); - get_ = null; - } - return getBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiAction) - } - - static { - defaultInstance = new MultiAction(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiAction) - } - - public interface ActionResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Result value = 1; - /** - * optional .Result value = 1; - */ - boolean hasValue(); - /** - * optional .Result value = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue(); - /** - * optional .Result value = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder(); - - // optional .NameBytesPair exception = 2; - /** - * optional .NameBytesPair exception = 2; - */ - boolean hasException(); - /** - * optional .NameBytesPair exception = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); - /** - * optional .NameBytesPair exception = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); - } - /** - * Protobuf type {@code ActionResult} - * - *
-   **
-   * An individual action result. The result will in the
-   * same order as the action in the request. If an action
-   * returns a value, it is set in value field. If it doesn't
-   * return anything, the result will be empty. If an action
-   * fails to execute due to any exception, the exception
-   * is returned as a stringified parameter.
-   * 
- */ - public static final class ActionResult extends - com.google.protobuf.GeneratedMessage - implements ActionResultOrBuilder { - // Use ActionResult.newBuilder() to construct. - private ActionResult(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ActionResult defaultInstance; - public static ActionResult getDefaultInstance() { - return defaultInstance; - } - - public ActionResult getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ActionResult( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = value_.toBuilder(); - } - value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(value_); - value_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 18: { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = exception_.toBuilder(); - } - exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(exception_); - exception_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ActionResult parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ActionResult(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional .Result value = 1; - public static 
final int VALUE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_; - /** - * optional .Result value = 1; - */ - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .Result value = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { - return value_; - } - /** - * optional .Result value = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { - return value_; - } - - // optional .NameBytesPair exception = 2; - public static final int EXCEPTION_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; - /** - * optional .NameBytesPair exception = 2; - */ - public boolean hasException() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .NameBytesPair exception = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { - return exception_; - } - /** - * optional .NameBytesPair exception = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - return exception_; - } - - private void initFields() { - value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasException()) { - if (!getException().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 
0x00000001) == 0x00000001)) { - output.writeMessage(1, value_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, exception_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, exception_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && (hasException() == other.hasException()); - if (hasException()) { - result = result && getException() - .equals(other.getException()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if 
(hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - if (hasException()) { - hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; - hash = (53 * hash) + getException().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) - throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ActionResult} - * - *
-     **
-     * An individual action result. The result will in the
-     * same order as the action in the request. If an action
-     * returns a value, it is set in value field. If it doesn't
-     * return anything, the result will be empty. If an action
-     * fails to execute due to any exception, the exception
-     * is returned as a stringified parameter.
-     * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getValueFieldBuilder(); - getExceptionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - } else { - exceptionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor 
- getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (exceptionBuilder_ == null) { - result.exception_ = exception_; - } else { - result.exception_ = exceptionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return 
this; - if (other.hasValue()) { - mergeValue(other.getValue()); - } - if (other.hasException()) { - mergeException(other.getException()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasException()) { - if (!getException().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional .Result value = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> valueBuilder_; - /** - * optional .Result value = 1; - */ - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .Result value = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { - if (valueBuilder_ == null) { - return value_; - } else { - return valueBuilder_.getMessage(); - } - } - /** - * optional .Result value = 1; - */ - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result 
value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .Result value = 1; - */ - public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .Result value = 1; - */ - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - value_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { - value_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .Result value = 1; - */ - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); - onChanged(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * optional .Result value = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getValueBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - /** - * optional .Result value = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_; - } - 
} - /** - * optional .Result value = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( - value_, - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - // optional .NameBytesPair exception = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; - /** - * optional .NameBytesPair exception = 2; - */ - public boolean hasException() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .NameBytesPair exception = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { - if (exceptionBuilder_ == null) { - return exception_; - } else { - return exceptionBuilder_.getMessage(); - } - } - /** - * optional .NameBytesPair exception = 2; - */ - public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (exceptionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - exception_ = value; - onChanged(); - } else { - 
exceptionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .NameBytesPair exception = 2; - */ - public Builder setException( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { - if (exceptionBuilder_ == null) { - exception_ = builderForValue.build(); - onChanged(); - } else { - exceptionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .NameBytesPair exception = 2; - */ - public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { - if (exceptionBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { - exception_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); - } else { - exception_ = value; - } - onChanged(); - } else { - exceptionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .NameBytesPair exception = 2; - */ - public Builder clearException() { - if (exceptionBuilder_ == null) { - exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); - onChanged(); - } else { - exceptionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * optional .NameBytesPair exception = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getExceptionFieldBuilder().getBuilder(); - } - /** - * optional .NameBytesPair exception = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { - if (exceptionBuilder_ != null) { - return exceptionBuilder_.getMessageOrBuilder(); - 
} else { - return exception_; - } - } - /** - * optional .NameBytesPair exception = 2; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> - getExceptionFieldBuilder() { - if (exceptionBuilder_ == null) { - exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( - exception_, - getParentForChildren(), - isClean()); - exception_ = null; - } - return exceptionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ActionResult) - } - - static { - defaultInstance = new ActionResult(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ActionResult) - } - - public interface MultiRequestOrBuilder + public interface RegionMutationOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .RegionSpecifier region = 1; @@ -25244,69 +23684,73 @@ public final class ClientProtos { */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - // repeated .MultiAction action = 2; + // optional bool atomic = 2; /** - * repeated .MultiAction action = 2; - */ - java.util.List - getActionList(); - /** - * repeated .MultiAction action = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index); - /** - * repeated .MultiAction action = 2; - */ - int getActionCount(); - /** - * repeated .MultiAction action = 2; - */ - java.util.List - getActionOrBuilderList(); - /** - * repeated .MultiAction action = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder 
getActionOrBuilder( - int index); - - // optional bool atomic = 3; - /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+     * When set, run mutations as atomic unit.
+     * 
*/ boolean hasAtomic(); /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+     * When set, run mutations as atomic unit.
+     * 
*/ boolean getAtomic(); + + // repeated .MutationProto mutation = 3; + /** + * repeated .MutationProto mutation = 3; + */ + java.util.List + getMutationList(); + /** + * repeated .MutationProto mutation = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(int index); + /** + * repeated .MutationProto mutation = 3; + */ + int getMutationCount(); + /** + * repeated .MutationProto mutation = 3; + */ + java.util.List + getMutationOrBuilderList(); + /** + * repeated .MutationProto mutation = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder( + int index); } /** - * Protobuf type {@code MultiRequest} + * Protobuf type {@code RegionMutation} * *
    **
-   * You can execute a list of actions on a given region in order.
-   *
-   * If it is a list of mutate actions, atomic can be set
-   * to make sure they can be processed atomically, just like
-   * RowMutations.
+   * Mutations to run against a Region.
    * 
*/ - public static final class MultiRequest extends + public static final class RegionMutation extends com.google.protobuf.GeneratedMessage - implements MultiRequestOrBuilder { - // Use MultiRequest.newBuilder() to construct. - private MultiRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements RegionMutationOrBuilder { + // Use RegionMutation.newBuilder() to construct. + private RegionMutation(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private RegionMutation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final MultiRequest defaultInstance; - public static MultiRequest getDefaultInstance() { + private static final RegionMutation defaultInstance; + public static RegionMutation getDefaultInstance() { return defaultInstance; } - public MultiRequest getDefaultInstanceForType() { + public RegionMutation getDefaultInstanceForType() { return defaultInstance; } @@ -25316,7 +23760,7 @@ public final class ClientProtos { getUnknownFields() { return this.unknownFields; } - private MultiRequest( + private RegionMutation( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -25352,19 +23796,19 @@ public final class ClientProtos { bitField0_ |= 0x00000001; break; } - case 18: { - if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - action_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000002; - } - action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.PARSER, extensionRegistry)); - break; - } - case 24: { + case 16: { bitField0_ |= 0x00000002; atomic_ = input.readBool(); break; } + case 26: { + if 
(!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + mutation_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + mutation_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry)); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -25373,8 +23817,8 @@ public final class ClientProtos { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - action_ = java.util.Collections.unmodifiableList(action_); + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + mutation_ = java.util.Collections.unmodifiableList(mutation_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -25382,28 +23826,28 @@ public final class ClientProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutation_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutation_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - 
public MultiRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionMutation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiRequest(input, extensionRegistry); + return new RegionMutation(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -25430,62 +23874,70 @@ public final class ClientProtos { return region_; } - // repeated .MultiAction action = 2; - public static final int ACTION_FIELD_NUMBER = 2; - private java.util.List action_; - /** - * repeated .MultiAction action = 2; - */ - public java.util.List getActionList() { - return action_; - } - /** - * repeated .MultiAction action = 2; - */ - public java.util.List - getActionOrBuilderList() { - return action_; - } - /** - * repeated .MultiAction action = 2; - */ - public int getActionCount() { - return action_.size(); - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { - return action_.get(index); - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( - int index) { - return action_.get(index); - } - - // optional bool atomic = 3; - public static final int ATOMIC_FIELD_NUMBER = 3; + // optional bool atomic = 2; + public static final int ATOMIC_FIELD_NUMBER = 2; private boolean atomic_; /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+     * When set, run mutations as atomic unit.
+     * 
*/ public boolean hasAtomic() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+     * When set, run mutations as atomic unit.
+     * 
*/ public boolean getAtomic() { return atomic_; } + // repeated .MutationProto mutation = 3; + public static final int MUTATION_FIELD_NUMBER = 3; + private java.util.List mutation_; + /** + * repeated .MutationProto mutation = 3; + */ + public java.util.List getMutationList() { + return mutation_; + } + /** + * repeated .MutationProto mutation = 3; + */ + public java.util.List + getMutationOrBuilderList() { + return mutation_; + } + /** + * repeated .MutationProto mutation = 3; + */ + public int getMutationCount() { + return mutation_.size(); + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(int index) { + return mutation_.get(index); + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder( + int index) { + return mutation_.get(index); + } + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - action_ = java.util.Collections.emptyList(); atomic_ = false; + mutation_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -25500,8 +23952,8 @@ public final class ClientProtos { memoizedIsInitialized = 0; return false; } - for (int i = 0; i < getActionCount(); i++) { - if (!getAction(i).isInitialized()) { + for (int i = 0; i < getMutationCount(); i++) { + if (!getMutation(i).isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -25516,11 +23968,11 @@ public final class ClientProtos { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, region_); } - for (int i = 0; i < action_.size(); i++) { - output.writeMessage(2, action_.get(i)); - } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(3, atomic_); + output.writeBool(2, atomic_); + } + for (int i = 0; i < mutation_.size(); 
i++) { + output.writeMessage(3, mutation_.get(i)); } getUnknownFields().writeTo(output); } @@ -25535,13 +23987,13 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, region_); } - for (int i = 0; i < action_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, action_.get(i)); - } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, atomic_); + .computeBoolSize(2, atomic_); + } + for (int i = 0; i < mutation_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, mutation_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -25560,10 +24012,10 @@ public final class ClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -25571,13 +24023,13 @@ public final class ClientProtos { result = result && getRegion() .equals(other.getRegion()); } - result = result && getActionList() - .equals(other.getActionList()); result = result && (hasAtomic() == other.hasAtomic()); if (hasAtomic()) { result = result && (getAtomic() == other.getAtomic()); } + result = result && getMutationList() + .equals(other.getMutationList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -25595,66 +24047,66 @@ public final class 
ClientProtos { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } - if (getActionCount() > 0) { - hash = (37 * hash) + ACTION_FIELD_NUMBER; - hash = (53 * hash) + getActionList().hashCode(); - } if (hasAtomic()) { hash = (37 * hash) + ATOMIC_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getAtomic()); } + if (getMutationCount() > 0) { + hash = (37 * hash) + MUTATION_FIELD_NUMBER; + hash = (53 * hash) + getMutationList().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public 
static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { @@ -25663,7 +24115,7 @@ public final class ClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -25675,33 +24127,29 @@ public final class ClientProtos { return builder; } /** - * Protobuf type {@code MultiRequest} + * Protobuf type {@code RegionMutation} * *
      **
-     * You can execute a list of actions on a given region in order.
-     *
-     * If it is a list of mutate actions, atomic can be set
-     * to make sure they can be processed atomically, just like
-     * RowMutations.
+     * Mutations to run against a Region.
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutation_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutation_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -25714,7 +24162,7 @@ public final class ClientProtos { private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); - getActionFieldBuilder(); + getMutationFieldBuilder(); } } private static Builder create() { @@ -25729,14 +24177,14 @@ public final class ClientProtos { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - if (actionBuilder_ == null) { - action_ = 
java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - actionBuilder_.clear(); - } atomic_ = false; - bitField0_ = (bitField0_ & ~0x00000004); + bitField0_ = (bitField0_ & ~0x00000002); + if (mutationBuilder_ == null) { + mutation_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + mutationBuilder_.clear(); + } return this; } @@ -25746,23 +24194,23 @@ public final class ClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutation_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation buildPartial() 
{ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -25773,67 +24221,67 @@ public final class ClientProtos { } else { result.region_ = regionBuilder_.build(); } - if (actionBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - action_ = java.util.Collections.unmodifiableList(action_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.action_ = action_; - } else { - result.action_ = actionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.atomic_ = atomic_; + if (mutationBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + mutation_ = java.util.Collections.unmodifiableList(mutation_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.mutation_ = mutation_; + } else { + result.mutation_ = mutationBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } - if (actionBuilder_ == null) { - if (!other.action_.isEmpty()) { - if (action_.isEmpty()) { - action_ = other.action_; - bitField0_ = (bitField0_ & ~0x00000002); + if (other.hasAtomic()) { + setAtomic(other.getAtomic()); + } + if (mutationBuilder_ == null) { + if (!other.mutation_.isEmpty()) { + if (mutation_.isEmpty()) { + mutation_ = other.mutation_; + bitField0_ = (bitField0_ & ~0x00000004); } else { - ensureActionIsMutable(); - action_.addAll(other.action_); + ensureMutationIsMutable(); + mutation_.addAll(other.mutation_); } onChanged(); } } else { - if (!other.action_.isEmpty()) { - if (actionBuilder_.isEmpty()) { - actionBuilder_.dispose(); - actionBuilder_ = null; - action_ = other.action_; - bitField0_ = (bitField0_ & ~0x00000002); - actionBuilder_ = + if (!other.mutation_.isEmpty()) { + if (mutationBuilder_.isEmpty()) { + mutationBuilder_.dispose(); + mutationBuilder_ = null; + mutation_ = other.mutation_; + bitField0_ = (bitField0_ & ~0x00000004); + mutationBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getActionFieldBuilder() : null; + getMutationFieldBuilder() : null; } else { - actionBuilder_.addAllMessages(other.action_); + mutationBuilder_.addAllMessages(other.mutation_); } } } - if (other.hasAtomic()) { - setAtomic(other.getAtomic()); - } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -25847,8 +24295,8 @@ public final class ClientProtos { return false; } - for (int i = 0; i < getActionCount(); i++) { - if (!getAction(i).isInitialized()) { + for (int i = 0; i < getMutationCount(); i++) { + if (!getMutation(i).isInitialized()) { return false; } @@ -25860,11 +24308,11 @@ public final class ClientProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -25992,279 +24440,2529 @@ public final class ClientProtos { return regionBuilder_; } - // repeated .MultiAction action = 2; - private java.util.List action_ = - java.util.Collections.emptyList(); - private void ensureActionIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - action_ = new java.util.ArrayList(action_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> actionBuilder_; - - /** - * repeated .MultiAction action = 2; - */ - public java.util.List getActionList() { - if (actionBuilder_ == null) { - return java.util.Collections.unmodifiableList(action_); - } else { - return actionBuilder_.getMessageList(); - } - } - /** - * repeated .MultiAction action = 2; - */ - public int getActionCount() { - if (actionBuilder_ == null) { - return action_.size(); - } else { - return actionBuilder_.getCount(); - } - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction getAction(int index) { - if (actionBuilder_ == null) { - return action_.get(index); - } else { - return actionBuilder_.getMessage(index); - } - } - /** - * repeated .MultiAction action = 2; - */ - public Builder setAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.set(index, value); - onChanged(); - } else { - actionBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder setAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.set(index, builderForValue.build()); - onChanged(); - } else { - actionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.add(value); - onChanged(); - } else { - actionBuilder_.addMessage(value); - } - return this; - } - 
/** - * repeated .MultiAction action = 2; - */ - public Builder addAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction value) { - if (actionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureActionIsMutable(); - action_.add(index, value); - onChanged(); - } else { - actionBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder addAction( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.add(builderForValue.build()); - onChanged(); - } else { - actionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder addAction( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder builderForValue) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.add(index, builderForValue.build()); - onChanged(); - } else { - actionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder addAllAction( - java.lang.Iterable values) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - super.addAll(values, action_); - onChanged(); - } else { - actionBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder clearAction() { - if (actionBuilder_ == null) { - action_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - actionBuilder_.clear(); - } - return this; - } - /** - * repeated .MultiAction action = 2; - */ - public Builder removeAction(int index) { - if (actionBuilder_ == null) { - ensureActionIsMutable(); - action_.remove(index); - onChanged(); - } else { - actionBuilder_.remove(index); - } - 
return this; - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder getActionBuilder( - int index) { - return getActionFieldBuilder().getBuilder(index); - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder getActionOrBuilder( - int index) { - if (actionBuilder_ == null) { - return action_.get(index); } else { - return actionBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .MultiAction action = 2; - */ - public java.util.List - getActionOrBuilderList() { - if (actionBuilder_ != null) { - return actionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(action_); - } - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder() { - return getActionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); - } - /** - * repeated .MultiAction action = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder addActionBuilder( - int index) { - return getActionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.getDefaultInstance()); - } - /** - * repeated .MultiAction action = 2; - */ - public java.util.List - getActionBuilderList() { - return getActionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder> - getActionFieldBuilder() { - if (actionBuilder_ == null) { - actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiActionOrBuilder>( - action_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - action_ = null; - } - return actionBuilder_; - } - - // optional bool atomic = 3; + // optional bool atomic = 2; private boolean atomic_ ; /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+       * When set, run mutations as atomic unit.
+       * 
*/ public boolean hasAtomic() { - return ((bitField0_ & 0x00000004) == 0x00000004); + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+       * When set, run mutations as atomic unit.
+       * 
*/ public boolean getAtomic() { return atomic_; } /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+       * When set, run mutations as atomic unit.
+       * 
*/ public Builder setAtomic(boolean value) { - bitField0_ |= 0x00000004; + bitField0_ |= 0x00000002; atomic_ = value; onChanged(); return this; } /** - * optional bool atomic = 3; + * optional bool atomic = 2; + * + *
+       * When set, run mutations as atomic unit.
+       * 
*/ public Builder clearAtomic() { - bitField0_ = (bitField0_ & ~0x00000004); + bitField0_ = (bitField0_ & ~0x00000002); atomic_ = false; onChanged(); return this; } + // repeated .MutationProto mutation = 3; + private java.util.List mutation_ = + java.util.Collections.emptyList(); + private void ensureMutationIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + mutation_ = new java.util.ArrayList(mutation_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; + + /** + * repeated .MutationProto mutation = 3; + */ + public java.util.List getMutationList() { + if (mutationBuilder_ == null) { + return java.util.Collections.unmodifiableList(mutation_); + } else { + return mutationBuilder_.getMessageList(); + } + } + /** + * repeated .MutationProto mutation = 3; + */ + public int getMutationCount() { + if (mutationBuilder_ == null) { + return mutation_.size(); + } else { + return mutationBuilder_.getCount(); + } + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(int index) { + if (mutationBuilder_ == null) { + return mutation_.get(index); + } else { + return mutationBuilder_.getMessage(index); + } + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder setMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationIsMutable(); + mutation_.set(index, value); + onChanged(); + } else { + mutationBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + 
public Builder setMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationBuilder_ == null) { + ensureMutationIsMutable(); + mutation_.set(index, builderForValue.build()); + onChanged(); + } else { + mutationBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder addMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationIsMutable(); + mutation_.add(value); + onChanged(); + } else { + mutationBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder addMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationIsMutable(); + mutation_.add(index, value); + onChanged(); + } else { + mutationBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder addMutation( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationBuilder_ == null) { + ensureMutationIsMutable(); + mutation_.add(builderForValue.build()); + onChanged(); + } else { + mutationBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder addMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationBuilder_ == null) { + ensureMutationIsMutable(); + mutation_.add(index, builderForValue.build()); + onChanged(); + } else { + mutationBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + 
/** + * repeated .MutationProto mutation = 3; + */ + public Builder addAllMutation( + java.lang.Iterable values) { + if (mutationBuilder_ == null) { + ensureMutationIsMutable(); + super.addAll(values, mutation_); + onChanged(); + } else { + mutationBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder clearMutation() { + if (mutationBuilder_ == null) { + mutation_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + mutationBuilder_.clear(); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public Builder removeMutation(int index) { + if (mutationBuilder_ == null) { + ensureMutationIsMutable(); + mutation_.remove(index); + onChanged(); + } else { + mutationBuilder_.remove(index); + } + return this; + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder( + int index) { + return getMutationFieldBuilder().getBuilder(index); + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder( + int index) { + if (mutationBuilder_ == null) { + return mutation_.get(index); } else { + return mutationBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .MutationProto mutation = 3; + */ + public java.util.List + getMutationOrBuilderList() { + if (mutationBuilder_ != null) { + return mutationBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(mutation_); + } + } + /** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationBuilder() { + return getMutationFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); + } + 
/** + * repeated .MutationProto mutation = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationBuilder( + int index) { + return getMutationFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); + } + /** + * repeated .MutationProto mutation = 3; + */ + public java.util.List + getMutationBuilderList() { + return getMutationFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> + getMutationFieldBuilder() { + if (mutationBuilder_ == null) { + mutationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( + mutation_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + mutation_ = null; + } + return mutationBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RegionMutation) + } + + static { + defaultInstance = new RegionMutation(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionMutation) + } + + public interface ResultOrExceptionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + /** + * optional .Result result = 1; + */ + boolean hasResult(); + /** + * optional .Result result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + /** + * optional .Result result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional 
.NameBytesPair exception = 2; + /** + * optional .NameBytesPair exception = 2; + */ + boolean hasException(); + /** + * optional .NameBytesPair exception = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + /** + * optional .NameBytesPair exception = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); + } + /** + * Protobuf type {@code ResultOrException} + * + *
+   **
+   * Either a Result or an Exception NameBytesPair (keyed by
+   * exception name whose value is the exception stringified)
+   * or maybe empty if no result and no exception.
+   * 
+ */ + public static final class ResultOrException extends + com.google.protobuf.GeneratedMessage + implements ResultOrExceptionOrBuilder { + // Use ResultOrException.newBuilder() to construct. + private ResultOrException(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ResultOrException defaultInstance; + public static ResultOrException getDefaultInstance() { + return defaultInstance; + } + + public ResultOrException getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ResultOrException( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = result_.toBuilder(); + } + result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(result_); + result_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = exception_.toBuilder(); + } + exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(exception_); + exception_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ResultOrException parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResultOrException(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional .Result 
result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + /** + * optional .Result result = 1; + */ + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .Result result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + return result_; + } + /** + * optional .Result result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional .NameBytesPair exception = 2; + public static final int EXCEPTION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + /** + * optional .NameBytesPair exception = 2; + */ + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .NameBytesPair exception = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + return exception_; + } + /** + * optional .NameBytesPair exception = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + return exception_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasException()) { + if (!getException().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + 
getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, exception_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, exception_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + 
hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code ResultOrException} + * + *
+     **
+     * Either a Result or an Exception NameBytesPair (keyed by
+     * exception name whose value is the exception stringified)
+     * or maybe empty if no result and no exception.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other) { + if 
(other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasException()) { + if (!getException().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + /** + * optional .Result result = 1; + */ + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .Result result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + 
} + /** + * optional .Result result = 1; + */ + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .Result result = 1; + */ + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .Result result = 1; + */ + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .Result result = 1; + */ + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * optional .Result result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + /** + * optional .Result result = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + /** + * optional .Result result = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional .NameBytesPair exception = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + /** + * optional .NameBytesPair exception = 2; + */ + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .NameBytesPair exception = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + /** + * optional .NameBytesPair exception = 2; + */ + public Builder 
setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .NameBytesPair exception = 2; + */ + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .NameBytesPair exception = 2; + */ + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .NameBytesPair exception = 2; + */ + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .NameBytesPair exception = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + /** + * 
optional .NameBytesPair exception = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + /** + * optional .NameBytesPair exception = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ResultOrException) + } + + static { + defaultInstance = new ResultOrException(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ResultOrException) + } + + public interface RegionMutationResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .ResultOrException resultOrException = 1; + /** + * repeated .ResultOrException resultOrException = 1; + */ + java.util.List + getResultOrExceptionList(); + /** + * repeated .ResultOrException resultOrException = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index); + /** + * repeated .ResultOrException resultOrException = 1; + */ + int getResultOrExceptionCount(); + /** + * repeated .ResultOrException resultOrException = 1; + */ + java.util.List + getResultOrExceptionOrBuilderList(); + /** + 
* repeated .ResultOrException resultOrException = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index); + } + /** + * Protobuf type {@code RegionMutationResult} + * + *
+   **
+   * The result of a RegionMutation.
+   * 
+ */ + public static final class RegionMutationResult extends + com.google.protobuf.GeneratedMessage + implements RegionMutationResultOrBuilder { + // Use RegionMutationResult.newBuilder() to construct. + private RegionMutationResult(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RegionMutationResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RegionMutationResult defaultInstance; + public static RegionMutationResult getDefaultInstance() { + return defaultInstance; + } + + public RegionMutationResult getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionMutationResult( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutationResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutationResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionMutationResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionMutationResult(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .ResultOrException resultOrException = 1; + public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1; + private java.util.List resultOrException_; + /** + * repeated .ResultOrException resultOrException = 1; + */ + public java.util.List getResultOrExceptionList() { + return resultOrException_; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionOrBuilderList() { + return resultOrException_; + } + 
/** + * repeated .ResultOrException resultOrException = 1; + */ + public int getResultOrExceptionCount() { + return resultOrException_.size(); + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) { + return resultOrException_.get(index); + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index) { + return resultOrException_.get(index); + } + + private void initFields() { + resultOrException_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultOrExceptionCount(); i++) { + if (!getResultOrException(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < resultOrException_.size(); i++) { + output.writeMessage(1, resultOrException_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < resultOrException_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, resultOrException_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return 
super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult) obj; + + boolean result = true; + result = result && getResultOrExceptionList() + .equals(other.getResultOrExceptionList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultOrExceptionCount() > 0) { + hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getResultOrExceptionList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code RegionMutationResult} + * + *
+     **
+     * The result of a RegionMutation.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutationResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutationResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultOrExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultOrExceptionBuilder_ == null) { + resultOrException_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultOrExceptionBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionMutationResult_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult(this); + int from_bitField0_ = bitField0_; + if (resultOrExceptionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.resultOrException_ = resultOrException_; + } else { + result.resultOrException_ = resultOrExceptionBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.getDefaultInstance()) return this; + if (resultOrExceptionBuilder_ == null) { + if (!other.resultOrException_.isEmpty()) { + if (resultOrException_.isEmpty()) { + resultOrException_ = other.resultOrException_; + bitField0_ = 
(bitField0_ & ~0x00000001); + } else { + ensureResultOrExceptionIsMutable(); + resultOrException_.addAll(other.resultOrException_); + } + onChanged(); + } + } else { + if (!other.resultOrException_.isEmpty()) { + if (resultOrExceptionBuilder_.isEmpty()) { + resultOrExceptionBuilder_.dispose(); + resultOrExceptionBuilder_ = null; + resultOrException_ = other.resultOrException_; + bitField0_ = (bitField0_ & ~0x00000001); + resultOrExceptionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultOrExceptionFieldBuilder() : null; + } else { + resultOrExceptionBuilder_.addAllMessages(other.resultOrException_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultOrExceptionCount(); i++) { + if (!getResultOrException(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .ResultOrException resultOrException = 1; + private java.util.List resultOrException_ = + java.util.Collections.emptyList(); + private void ensureResultOrExceptionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = new java.util.ArrayList(resultOrException_); + bitField0_ |= 0x00000001; + } + } + + private 
com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_; + + /** + * repeated .ResultOrException resultOrException = 1; + */ + public java.util.List getResultOrExceptionList() { + if (resultOrExceptionBuilder_ == null) { + return java.util.Collections.unmodifiableList(resultOrException_); + } else { + return resultOrExceptionBuilder_.getMessageList(); + } + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public int getResultOrExceptionCount() { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.size(); + } else { + return resultOrExceptionBuilder_.getCount(); + } + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.get(index); + } else { + return resultOrExceptionBuilder_.getMessage(index); + } + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder setResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.set(index, value); + onChanged(); + } else { + resultOrExceptionBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder setResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); 
+ resultOrException_.set(index, builderForValue.build()); + onChanged(); + } else { + resultOrExceptionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.add(value); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder addResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.add(index, value); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder addResultOrException( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.add(builderForValue.build()); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder addResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.add(index, builderForValue.build()); + onChanged(); + } else { + 
resultOrExceptionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder addAllResultOrException( + java.lang.Iterable values) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + super.addAll(values, resultOrException_); + onChanged(); + } else { + resultOrExceptionBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder clearResultOrException() { + if (resultOrExceptionBuilder_ == null) { + resultOrException_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultOrExceptionBuilder_.clear(); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public Builder removeResultOrException(int index) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.remove(index); + onChanged(); + } else { + resultOrExceptionBuilder_.remove(index); + } + return this; + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder( + int index) { + return getResultOrExceptionFieldBuilder().getBuilder(index); + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index) { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.get(index); } else { + return resultOrExceptionBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionOrBuilderList() { + if (resultOrExceptionBuilder_ != null) { + return resultOrExceptionBuilder_.getMessageOrBuilderList(); + 
} else { + return java.util.Collections.unmodifiableList(resultOrException_); + } + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() { + return getResultOrExceptionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()); + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder( + int index) { + return getResultOrExceptionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()); + } + /** + * repeated .ResultOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionBuilderList() { + return getResultOrExceptionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> + getResultOrExceptionFieldBuilder() { + if (resultOrExceptionBuilder_ == null) { + resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>( + resultOrException_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + resultOrException_ = null; + } + return resultOrExceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RegionMutationResult) + } + + static { + defaultInstance = new 
RegionMutationResult(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionMutationResult) + } + + public interface MultiRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionMutation regionMutation = 1; + /** + * repeated .RegionMutation regionMutation = 1; + */ + java.util.List + getRegionMutationList(); + /** + * repeated .RegionMutation regionMutation = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation getRegionMutation(int index); + /** + * repeated .RegionMutation regionMutation = 1; + */ + int getRegionMutationCount(); + /** + * repeated .RegionMutation regionMutation = 1; + */ + java.util.List + getRegionMutationOrBuilderList(); + /** + * repeated .RegionMutation regionMutation = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder getRegionMutationOrBuilder( + int index); + } + /** + * Protobuf type {@code MultiRequest} + * + *
+   **
+   * Execute a list of actions on a given region in order.
+   * 
+ */ + public static final class MultiRequest extends + com.google.protobuf.GeneratedMessage + implements MultiRequestOrBuilder { + // Use MultiRequest.newBuilder() to construct. + private MultiRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiRequest defaultInstance; + public static MultiRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regionMutation_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + regionMutation_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regionMutation_ = java.util.Collections.unmodifiableList(regionMutation_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .RegionMutation regionMutation = 1; + public static final int REGIONMUTATION_FIELD_NUMBER = 1; + private java.util.List regionMutation_; + /** + * repeated .RegionMutation regionMutation = 1; + */ + public java.util.List getRegionMutationList() { + return regionMutation_; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public java.util.List + getRegionMutationOrBuilderList() { + return regionMutation_; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public int getRegionMutationCount() { + return 
regionMutation_.size(); + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation getRegionMutation(int index) { + return regionMutation_.get(index); + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder getRegionMutationOrBuilder( + int index) { + return regionMutation_.get(index); + } + + private void initFields() { + regionMutation_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionMutationCount(); i++) { + if (!getRegionMutation(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionMutation_.size(); i++) { + output.writeMessage(1, regionMutation_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < regionMutation_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionMutation_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; + + boolean result = true; + result = result && getRegionMutationList() + .equals(other.getRegionMutationList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionMutationCount() > 0) { + hash = (37 * hash) + REGIONMUTATION_FIELD_NUMBER; + hash = (53 * hash) + getRegionMutationList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + 
return builder; + } + /** + * Protobuf type {@code MultiRequest} + * + *
+     **
+     * Execute a list of actions on a given region in order.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionMutationFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionMutationBuilder_ == null) { + regionMutation_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionMutationBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); + int from_bitField0_ = bitField0_; + if (regionMutationBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionMutation_ = java.util.Collections.unmodifiableList(regionMutation_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionMutation_ = regionMutation_; + } else { + result.regionMutation_ = regionMutationBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; + if (regionMutationBuilder_ == null) { + if (!other.regionMutation_.isEmpty()) { + if (regionMutation_.isEmpty()) { + regionMutation_ = other.regionMutation_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionMutationIsMutable(); + regionMutation_.addAll(other.regionMutation_); + } + onChanged(); + } + } else { + if (!other.regionMutation_.isEmpty()) { + if 
(regionMutationBuilder_.isEmpty()) { + regionMutationBuilder_.dispose(); + regionMutationBuilder_ = null; + regionMutation_ = other.regionMutation_; + bitField0_ = (bitField0_ & ~0x00000001); + regionMutationBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getRegionMutationFieldBuilder() : null; + } else { + regionMutationBuilder_.addAllMessages(other.regionMutation_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionMutationCount(); i++) { + if (!getRegionMutation(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .RegionMutation regionMutation = 1; + private java.util.List regionMutation_ = + java.util.Collections.emptyList(); + private void ensureRegionMutationIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionMutation_ = new java.util.ArrayList(regionMutation_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder> regionMutationBuilder_; + + /** + * repeated 
.RegionMutation regionMutation = 1; + */ + public java.util.List getRegionMutationList() { + if (regionMutationBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionMutation_); + } else { + return regionMutationBuilder_.getMessageList(); + } + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public int getRegionMutationCount() { + if (regionMutationBuilder_ == null) { + return regionMutation_.size(); + } else { + return regionMutationBuilder_.getCount(); + } + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation getRegionMutation(int index) { + if (regionMutationBuilder_ == null) { + return regionMutation_.get(index); + } else { + return regionMutationBuilder_.getMessage(index); + } + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder setRegionMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation value) { + if (regionMutationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionMutationIsMutable(); + regionMutation_.set(index, value); + onChanged(); + } else { + regionMutationBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder setRegionMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder builderForValue) { + if (regionMutationBuilder_ == null) { + ensureRegionMutationIsMutable(); + regionMutation_.set(index, builderForValue.build()); + onChanged(); + } else { + regionMutationBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder addRegionMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation value) { + if (regionMutationBuilder_ == null) { + if (value == null) { + throw new 
NullPointerException(); + } + ensureRegionMutationIsMutable(); + regionMutation_.add(value); + onChanged(); + } else { + regionMutationBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder addRegionMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation value) { + if (regionMutationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionMutationIsMutable(); + regionMutation_.add(index, value); + onChanged(); + } else { + regionMutationBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder addRegionMutation( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder builderForValue) { + if (regionMutationBuilder_ == null) { + ensureRegionMutationIsMutable(); + regionMutation_.add(builderForValue.build()); + onChanged(); + } else { + regionMutationBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder addRegionMutation( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder builderForValue) { + if (regionMutationBuilder_ == null) { + ensureRegionMutationIsMutable(); + regionMutation_.add(index, builderForValue.build()); + onChanged(); + } else { + regionMutationBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder addAllRegionMutation( + java.lang.Iterable values) { + if (regionMutationBuilder_ == null) { + ensureRegionMutationIsMutable(); + super.addAll(values, regionMutation_); + onChanged(); + } else { + regionMutationBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder clearRegionMutation() { + if 
(regionMutationBuilder_ == null) { + regionMutation_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionMutationBuilder_.clear(); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public Builder removeRegionMutation(int index) { + if (regionMutationBuilder_ == null) { + ensureRegionMutationIsMutable(); + regionMutation_.remove(index); + onChanged(); + } else { + regionMutationBuilder_.remove(index); + } + return this; + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder getRegionMutationBuilder( + int index) { + return getRegionMutationFieldBuilder().getBuilder(index); + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder getRegionMutationOrBuilder( + int index) { + if (regionMutationBuilder_ == null) { + return regionMutation_.get(index); } else { + return regionMutationBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public java.util.List + getRegionMutationOrBuilderList() { + if (regionMutationBuilder_ != null) { + return regionMutationBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionMutation_); + } + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder addRegionMutationBuilder() { + return getRegionMutationFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.getDefaultInstance()); + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder addRegionMutationBuilder( + int index) { + return getRegionMutationFieldBuilder().addBuilder( 
+ index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.getDefaultInstance()); + } + /** + * repeated .RegionMutation regionMutation = 1; + */ + public java.util.List + getRegionMutationBuilderList() { + return getRegionMutationFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder> + getRegionMutationFieldBuilder() { + if (regionMutationBuilder_ == null) { + regionMutationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationOrBuilder>( + regionMutation_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + regionMutation_ = null; + } + return regionMutationBuilder_; + } + // @@protoc_insertion_point(builder_scope:MultiRequest) } @@ -26279,29 +26977,29 @@ public final class ClientProtos { public interface MultiResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // repeated .ActionResult result = 1; + // repeated .RegionMutationResult regionMutationResult = 1; /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - java.util.List - getResultList(); + java.util.List + getRegionMutationResultList(); /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult getRegionMutationResult(int index); /** - * repeated .ActionResult result = 1; + * 
repeated .RegionMutationResult regionMutationResult = 1; */ - int getResultCount(); + int getRegionMutationResultCount(); /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - java.util.List - getResultOrBuilderList(); + java.util.List + getRegionMutationResultOrBuilderList(); /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder getRegionMutationResultOrBuilder( int index); } /** @@ -26357,10 +27055,10 @@ public final class ClientProtos { } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(); + regionMutationResult_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.PARSER, extensionRegistry)); + regionMutationResult_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.PARSER, extensionRegistry)); break; } } @@ -26372,7 +27070,7 @@ public final class ClientProtos { e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - result_ = java.util.Collections.unmodifiableList(result_); + regionMutationResult_ = java.util.Collections.unmodifiableList(regionMutationResult_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -26405,52 +27103,52 @@ public final class ClientProtos { return PARSER; } - // repeated .ActionResult result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private java.util.List result_; + // repeated .RegionMutationResult regionMutationResult = 1; + public static final int REGIONMUTATIONRESULT_FIELD_NUMBER = 1; + private java.util.List 
regionMutationResult_; /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public java.util.List getResultList() { - return result_; + public java.util.List getRegionMutationResultList() { + return regionMutationResult_; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public java.util.List - getResultOrBuilderList() { - return result_; + public java.util.List + getRegionMutationResultOrBuilderList() { + return regionMutationResult_; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public int getResultCount() { - return result_.size(); + public int getRegionMutationResultCount() { + return regionMutationResult_.size(); } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { - return result_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult getRegionMutationResult(int index) { + return regionMutationResult_.get(index); } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder getRegionMutationResultOrBuilder( int index) { - return result_.get(index); + return regionMutationResult_.get(index); } private void initFields() { - result_ = java.util.Collections.emptyList(); + regionMutationResult_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getResultCount(); 
i++) { - if (!getResult(i).isInitialized()) { + for (int i = 0; i < getRegionMutationResultCount(); i++) { + if (!getRegionMutationResult(i).isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -26462,8 +27160,8 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - for (int i = 0; i < result_.size(); i++) { - output.writeMessage(1, result_.get(i)); + for (int i = 0; i < regionMutationResult_.size(); i++) { + output.writeMessage(1, regionMutationResult_.get(i)); } getUnknownFields().writeTo(output); } @@ -26474,9 +27172,9 @@ public final class ClientProtos { if (size != -1) return size; size = 0; - for (int i = 0; i < result_.size(); i++) { + for (int i = 0; i < regionMutationResult_.size(); i++) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_.get(i)); + .computeMessageSize(1, regionMutationResult_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -26501,8 +27199,8 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; boolean result = true; - result = result && getResultList() - .equals(other.getResultList()); + result = result && getRegionMutationResultList() + .equals(other.getRegionMutationResultList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -26516,9 +27214,9 @@ public final class ClientProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); + if (getRegionMutationResultCount() > 0) { + hash = (37 * hash) + REGIONMUTATIONRESULT_FIELD_NUMBER; + hash = (53 * hash) + getRegionMutationResultList().hashCode(); } hash = (29 * hash) + 
getUnknownFields().hashCode(); memoizedHashCode = hash; @@ -26621,7 +27319,7 @@ public final class ClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); + getRegionMutationResultFieldBuilder(); } } private static Builder create() { @@ -26630,11 +27328,11 @@ public final class ClientProtos { public Builder clear() { super.clear(); - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); + if (regionMutationResultBuilder_ == null) { + regionMutationResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { - resultBuilder_.clear(); + regionMutationResultBuilder_.clear(); } return this; } @@ -26663,14 +27361,14 @@ public final class ClientProtos { public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); int from_bitField0_ = bitField0_; - if (resultBuilder_ == null) { + if (regionMutationResultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = java.util.Collections.unmodifiableList(result_); + regionMutationResult_ = java.util.Collections.unmodifiableList(regionMutationResult_); bitField0_ = (bitField0_ & ~0x00000001); } - result.result_ = result_; + result.regionMutationResult_ = regionMutationResult_; } else { - result.result_ = resultBuilder_.build(); + result.regionMutationResult_ = regionMutationResultBuilder_.build(); } onBuilt(); return result; @@ -26687,29 +27385,29 @@ public final class ClientProtos { public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; - if (resultBuilder_ == null) { - if 
(!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; + if (regionMutationResultBuilder_ == null) { + if (!other.regionMutationResult_.isEmpty()) { + if (regionMutationResult_.isEmpty()) { + regionMutationResult_ = other.regionMutationResult_; bitField0_ = (bitField0_ & ~0x00000001); } else { - ensureResultIsMutable(); - result_.addAll(other.result_); + ensureRegionMutationResultIsMutable(); + regionMutationResult_.addAll(other.regionMutationResult_); } onChanged(); } } else { - if (!other.result_.isEmpty()) { - if (resultBuilder_.isEmpty()) { - resultBuilder_.dispose(); - resultBuilder_ = null; - result_ = other.result_; + if (!other.regionMutationResult_.isEmpty()) { + if (regionMutationResultBuilder_.isEmpty()) { + regionMutationResultBuilder_.dispose(); + regionMutationResultBuilder_ = null; + regionMutationResult_ = other.regionMutationResult_; bitField0_ = (bitField0_ & ~0x00000001); - resultBuilder_ = + regionMutationResultBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getResultFieldBuilder() : null; + getRegionMutationResultFieldBuilder() : null; } else { - resultBuilder_.addAllMessages(other.result_); + regionMutationResultBuilder_.addAllMessages(other.regionMutationResult_); } } } @@ -26718,8 +27416,8 @@ public final class ClientProtos { } public final boolean isInitialized() { - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { + for (int i = 0; i < getRegionMutationResultCount(); i++) { + if (!getRegionMutationResult(i).isInitialized()) { return false; } @@ -26746,244 +27444,244 @@ public final class ClientProtos { } private int bitField0_; - // repeated .ActionResult result = 1; - private java.util.List result_ = + // repeated .RegionMutationResult regionMutationResult = 1; + private java.util.List regionMutationResult_ = java.util.Collections.emptyList(); - private void ensureResultIsMutable() { + private void ensureRegionMutationResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(result_); + regionMutationResult_ = new java.util.ArrayList(regionMutationResult_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder> regionMutationResultBuilder_; /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public java.util.List getResultList() { - if (resultBuilder_ == null) { - return java.util.Collections.unmodifiableList(result_); + public java.util.List 
getRegionMutationResultList() { + if (regionMutationResultBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionMutationResult_); } else { - return resultBuilder_.getMessageList(); + return regionMutationResultBuilder_.getMessageList(); } } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public int getResultCount() { - if (resultBuilder_ == null) { - return result_.size(); + public int getRegionMutationResultCount() { + if (regionMutationResultBuilder_ == null) { + return regionMutationResult_.size(); } else { - return resultBuilder_.getCount(); + return regionMutationResultBuilder_.getCount(); } } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { - if (resultBuilder_ == null) { - return result_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult getRegionMutationResult(int index) { + if (regionMutationResultBuilder_ == null) { + return regionMutationResult_.get(index); } else { - return resultBuilder_.getMessage(index); + return regionMutationResultBuilder_.getMessage(index); } } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { + public Builder setRegionMutationResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult value) { + if (regionMutationResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureResultIsMutable(); - result_.set(index, value); + ensureRegionMutationResultIsMutable(); + regionMutationResult_.set(index, value); onChanged(); } else { - resultBuilder_.setMessage(index, value); + 
regionMutationResultBuilder_.setMessage(index, value); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.set(index, builderForValue.build()); + public Builder setRegionMutationResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder builderForValue) { + if (regionMutationResultBuilder_ == null) { + ensureRegionMutationResultIsMutable(); + regionMutationResult_.set(index, builderForValue.build()); onChanged(); } else { - resultBuilder_.setMessage(index, builderForValue.build()); + regionMutationResultBuilder_.setMessage(index, builderForValue.build()); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { + public Builder addRegionMutationResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult value) { + if (regionMutationResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureResultIsMutable(); - result_.add(value); + ensureRegionMutationResultIsMutable(); + regionMutationResult_.add(value); onChanged(); } else { - resultBuilder_.addMessage(value); + regionMutationResultBuilder_.addMessage(value); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { - if (resultBuilder_ == null) { + public Builder addRegionMutationResult( + int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult value) { + if (regionMutationResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureResultIsMutable(); - result_.add(index, value); + ensureRegionMutationResultIsMutable(); + regionMutationResult_.add(index, value); onChanged(); } else { - resultBuilder_.addMessage(index, value); + regionMutationResultBuilder_.addMessage(index, value); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder addResult( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(builderForValue.build()); + public Builder addRegionMutationResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder builderForValue) { + if (regionMutationResultBuilder_ == null) { + ensureRegionMutationResultIsMutable(); + regionMutationResult_.add(builderForValue.build()); onChanged(); } else { - resultBuilder_.addMessage(builderForValue.build()); + regionMutationResultBuilder_.addMessage(builderForValue.build()); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(index, builderForValue.build()); + public Builder addRegionMutationResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder builderForValue) { + if (regionMutationResultBuilder_ == null) { + ensureRegionMutationResultIsMutable(); + regionMutationResult_.add(index, builderForValue.build()); onChanged(); } else { - resultBuilder_.addMessage(index, builderForValue.build()); + 
regionMutationResultBuilder_.addMessage(index, builderForValue.build()); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder addAllResult( - java.lang.Iterable values) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - super.addAll(values, result_); + public Builder addAllRegionMutationResult( + java.lang.Iterable values) { + if (regionMutationResultBuilder_ == null) { + ensureRegionMutationResultIsMutable(); + super.addAll(values, regionMutationResult_); onChanged(); } else { - resultBuilder_.addAllMessages(values); + regionMutationResultBuilder_.addAllMessages(values); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); + public Builder clearRegionMutationResult() { + if (regionMutationResultBuilder_ == null) { + regionMutationResult_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { - resultBuilder_.clear(); + regionMutationResultBuilder_.clear(); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public Builder removeResult(int index) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.remove(index); + public Builder removeRegionMutationResult(int index) { + if (regionMutationResultBuilder_ == null) { + ensureRegionMutationResultIsMutable(); + regionMutationResult_.remove(index); onChanged(); } else { - resultBuilder_.remove(index); + regionMutationResultBuilder_.remove(index); } return this; } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder getRegionMutationResultBuilder( int index) { - return getResultFieldBuilder().getBuilder(index); + return getRegionMutationResultFieldBuilder().getBuilder(index); } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder getRegionMutationResultOrBuilder( int index) { - if (resultBuilder_ == null) { - return result_.get(index); } else { - return resultBuilder_.getMessageOrBuilder(index); + if (regionMutationResultBuilder_ == null) { + return regionMutationResult_.get(index); } else { + return regionMutationResultBuilder_.getMessageOrBuilder(index); } } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public java.util.List - getResultOrBuilderList() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilderList(); + public java.util.List + getRegionMutationResultOrBuilderList() { + if (regionMutationResultBuilder_ != null) { + return regionMutationResultBuilder_.getMessageOrBuilderList(); } else { - return java.util.Collections.unmodifiableList(result_); + return java.util.Collections.unmodifiableList(regionMutationResult_); } } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { - return getResultFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder addRegionMutationResultBuilder() { + return 
getRegionMutationResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.getDefaultInstance()); } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder addRegionMutationResultBuilder( int index) { - return getResultFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + return getRegionMutationResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.getDefaultInstance()); } /** - * repeated .ActionResult result = 1; + * repeated .RegionMutationResult regionMutationResult = 1; */ - public java.util.List - getResultBuilderList() { - return getResultFieldBuilder().getBuilderList(); + public java.util.List + getRegionMutationResultBuilderList() { + return getRegionMutationResultFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder>( - result_, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder> + getRegionMutationResultFieldBuilder() { + if (regionMutationResultBuilder_ == null) { + regionMutationResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResultOrBuilder>( + regionMutationResult_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); - result_ = null; + regionMutationResult_ = null; } - return resultBuilder_; + return regionMutationResultBuilder_; } // @@protoc_insertion_point(builder_scope:MultiResponse) @@ -27766,15 +28464,20 @@ public final class ClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_CoprocessorServiceResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiAction_descriptor; + internal_static_RegionMutation_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiAction_fieldAccessorTable; + internal_static_RegionMutation_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_ActionResult_descriptor; + internal_static_ResultOrException_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ActionResult_fieldAccessorTable; + internal_static_ResultOrException_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionMutationResult_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionMutationResult_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MultiRequest_descriptor; private static @@ -27870,25 +28573,27 @@ 
public final class ClientProtos { "\n\004call\030\002 \002(\0132\027.CoprocessorServiceCall\"]\n" + "\032CoprocessorServiceResponse\022 \n\006region\030\001 " + "\002(\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016.N" + - "ameBytesPair\"B\n\013MultiAction\022 \n\010mutation\030" + - "\001 \001(\0132\016.MutationProto\022\021\n\003get\030\002 \001(\0132\004.Get" + - "\"I\n\014ActionResult\022\026\n\005value\030\001 \001(\0132\007.Result" + - "\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"^\n\014M", - "ultiRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpec" + - "ifier\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022\016\n\006a" + - "tomic\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006result\030\001" + - " \003(\0132\r.ActionResult2\342\002\n\rClientService\022 \n" + - "\003Get\022\013.GetRequest\032\014.GetResponse\022/\n\010Multi" + - "Get\022\020.MultiGetRequest\032\021.MultiGetResponse" + - "\022)\n\006Mutate\022\016.MutateRequest\032\017.MutateRespo" + - "nse\022#\n\004Scan\022\014.ScanRequest\032\r.ScanResponse" + - "\022>\n\rBulkLoadHFile\022\025.BulkLoadHFileRequest" + - "\032\026.BulkLoadHFileResponse\022F\n\013ExecService\022", - "\032.CoprocessorServiceRequest\032\033.Coprocesso" + - "rServiceResponse\022&\n\005Multi\022\r.MultiRequest" + - "\032\016.MultiResponseBB\n*org.apache.hadoop.hb" + - "ase.protobuf.generatedB\014ClientProtosH\001\210\001" + - "\001\240\001\001" + "ameBytesPair\"d\n\016RegionMutation\022 \n\006region" + + "\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006atomic\030\002 \001(\010" + + "\022 \n\010mutation\030\003 \003(\0132\016.MutationProto\"O\n\021Re" + + "sultOrException\022\027\n\006result\030\001 \001(\0132\007.Result", + "\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"E\n\024R" + + "egionMutationResult\022-\n\021resultOrException" + + "\030\001 \003(\0132\022.ResultOrException\"7\n\014MultiReque" + + "st\022\'\n\016regionMutation\030\001 
\003(\0132\017.RegionMutat" + + "ion\"D\n\rMultiResponse\0223\n\024regionMutationRe" + + "sult\030\001 \003(\0132\025.RegionMutationResult2\342\002\n\rCl" + + "ientService\022 \n\003Get\022\013.GetRequest\032\014.GetRes" + + "ponse\022/\n\010MultiGet\022\020.MultiGetRequest\032\021.Mu" + + "ltiGetResponse\022)\n\006Mutate\022\016.MutateRequest" + + "\032\017.MutateResponse\022#\n\004Scan\022\014.ScanRequest\032", + "\r.ScanResponse\022>\n\rBulkLoadHFile\022\025.BulkLo" + + "adHFileRequest\032\026.BulkLoadHFileResponse\022F" + + "\n\013ExecService\022\032.CoprocessorServiceReques" + + "t\032\033.CoprocessorServiceResponse\022&\n\005Multi\022" + + "\r.MultiRequest\032\016.MultiResponseBB\n*org.ap" + + "ache.hadoop.hbase.protobuf.generatedB\014Cl" + + "ientProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -28027,30 +28732,36 @@ public final class ClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceResponse_descriptor, new java.lang.String[] { "Region", "Value", }); - internal_static_MultiAction_descriptor = + internal_static_RegionMutation_descriptor = getDescriptor().getMessageTypes().get(19); - internal_static_MultiAction_fieldAccessorTable = new + internal_static_RegionMutation_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiAction_descriptor, - new java.lang.String[] { "Mutation", "Get", }); - internal_static_ActionResult_descriptor = + internal_static_RegionMutation_descriptor, + new java.lang.String[] { "Region", "Atomic", "Mutation", }); + internal_static_ResultOrException_descriptor = getDescriptor().getMessageTypes().get(20); - internal_static_ActionResult_fieldAccessorTable = new + internal_static_ResultOrException_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ActionResult_descriptor, - new java.lang.String[] { "Value", "Exception", }); - internal_static_MultiRequest_descriptor = + internal_static_ResultOrException_descriptor, + new java.lang.String[] { "Result", "Exception", }); + internal_static_RegionMutationResult_descriptor = getDescriptor().getMessageTypes().get(21); + internal_static_RegionMutationResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionMutationResult_descriptor, + new java.lang.String[] { "ResultOrException", }); + internal_static_MultiRequest_descriptor = + getDescriptor().getMessageTypes().get(22); internal_static_MultiRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRequest_descriptor, - new java.lang.String[] { "Region", "Action", "Atomic", }); + new java.lang.String[] { "RegionMutation", }); internal_static_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(23); internal_static_MultiResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiResponse_descriptor, - new java.lang.String[] { "Result", }); + new java.lang.String[] { "RegionMutationResult", }); return null; } }; diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index 603f4c5b84d..313d7b2b261 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -3662,6 +3662,26 @@ public final class RPCProtos { * */ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder getCellBlockMetaOrBuilder(); + + // optional uint32 priority = 6; + /** + * 
optional uint32 priority = 6; + * + *
+     * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+     * See HConstants.
+     * 
+ */ + boolean hasPriority(); + /** + * optional uint32 priority = 6; + * + *
+     * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+     * See HConstants.
+     * 
+ */ + int getPriority(); } /** * Protobuf type {@code RequestHeader} @@ -3759,6 +3779,11 @@ public final class RPCProtos { bitField0_ |= 0x00000010; break; } + case 48: { + bitField0_ |= 0x00000020; + priority_ = input.readUInt32(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -3946,12 +3971,39 @@ public final class RPCProtos { return cellBlockMeta_; } + // optional uint32 priority = 6; + public static final int PRIORITY_FIELD_NUMBER = 6; + private int priority_; + /** + * optional uint32 priority = 6; + * + *
+     * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+     * See HConstants.
+     * 
+ */ + public boolean hasPriority() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional uint32 priority = 6; + * + *
+     * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+     * See HConstants.
+     * 
+ */ + public int getPriority() { + return priority_; + } + private void initFields() { callId_ = 0; traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); methodName_ = ""; requestParam_ = false; cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + priority_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -3980,6 +4032,9 @@ public final class RPCProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, cellBlockMeta_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeUInt32(6, priority_); + } getUnknownFields().writeTo(output); } @@ -4009,6 +4064,10 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, cellBlockMeta_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(6, priority_); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -4057,6 +4116,11 @@ public final class RPCProtos { result = result && getCellBlockMeta() .equals(other.getCellBlockMeta()); } + result = result && (hasPriority() == other.hasPriority()); + if (hasPriority()) { + result = result && (getPriority() + == other.getPriority()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -4090,6 +4154,10 @@ public final class RPCProtos { hash = (37 * hash) + CELL_BLOCK_META_FIELD_NUMBER; hash = (53 * hash) + getCellBlockMeta().hashCode(); } + if (hasPriority()) { + hash = (37 * hash) + PRIORITY_FIELD_NUMBER; + hash = (53 * hash) + getPriority(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -4223,6 +4291,8 @@ public final class RPCProtos { cellBlockMetaBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); + priority_ = 0; + bitField0_ = (bitField0_ & 
~0x00000020); return this; } @@ -4279,6 +4349,10 @@ public final class RPCProtos { } else { result.cellBlockMeta_ = cellBlockMetaBuilder_.build(); } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.priority_ = priority_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -4312,6 +4386,9 @@ public final class RPCProtos { if (other.hasCellBlockMeta()) { mergeCellBlockMeta(other.getCellBlockMeta()); } + if (other.hasPriority()) { + setPriority(other.getPriority()); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -4781,6 +4858,59 @@ public final class RPCProtos { return cellBlockMetaBuilder_; } + // optional uint32 priority = 6; + private int priority_ ; + /** + * optional uint32 priority = 6; + * + *
+       * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+       * See HConstants.
+       * 
+ */ + public boolean hasPriority() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional uint32 priority = 6; + * + *
+       * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+       * See HConstants.
+       * 
+ */ + public int getPriority() { + return priority_; + } + /** + * optional uint32 priority = 6; + * + *
+       * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+       * See HConstants.
+       * 
+ */ + public Builder setPriority(int value) { + bitField0_ |= 0x00000020; + priority_ = value; + onChanged(); + return this; + } + /** + * optional uint32 priority = 6; + * + *
+       * 0 is NORMAL priority.  100 is HIGH.  If no priority, treat it as NORMAL.
+       * See HConstants.
+       * 
+ */ + public Builder clearPriority() { + bitField0_ = (bitField0_ & ~0x00000020); + priority_ = 0; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:RequestHeader) } @@ -5797,15 +5927,15 @@ public final class RPCProtos { "\001(\r\"|\n\021ExceptionResponse\022\034\n\024exception_cl" + "ass_name\030\001 \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010h" + "ostname\030\003 \001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_re", - "try\030\005 \001(\010\"\224\001\n\rRequestHeader\022\017\n\007call_id\030\001" + + "try\030\005 \001(\010\"\246\001\n\rRequestHeader\022\017\n\007call_id\030\001" + " \001(\r\022\035\n\ntrace_info\030\002 \001(\0132\t.RPCTInfo\022\023\n\013m" + "ethod_name\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\022" + "\'\n\017cell_block_meta\030\005 \001(\0132\016.CellBlockMeta" + - "\"q\n\016ResponseHeader\022\017\n\007call_id\030\001 \001(\r\022%\n\te" + - "xception\030\002 \001(\0132\022.ExceptionResponse\022\'\n\017ce" + - "ll_block_meta\030\003 \001(\0132\016.CellBlockMetaB<\n*o" + - "rg.apache.hadoop.hbase.protobuf.generate" + - "dB\tRPCProtosH\001\240\001\001" + "\022\020\n\010priority\030\006 \001(\r\"q\n\016ResponseHeader\022\017\n\007" + + "call_id\030\001 \001(\r\022%\n\texception\030\002 \001(\0132\022.Excep" + + "tionResponse\022\'\n\017cell_block_meta\030\003 \001(\0132\016." 
+ + "CellBlockMetaB<\n*org.apache.hadoop.hbase" + + ".protobuf.generatedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -5841,7 +5971,7 @@ public final class RPCProtos { internal_static_RequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RequestHeader_descriptor, - new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", }); + new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", }); internal_static_ResponseHeader_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_ResponseHeader_fieldAccessorTable = new diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto index 3629504fd87..60e802058c2 100644 --- a/hbase-protocol/src/main/protobuf/Client.proto +++ b/hbase-protocol/src/main/protobuf/Client.proto @@ -319,42 +319,41 @@ message CoprocessorServiceResponse { } /** - * An action that is part of MultiRequest. - * This is a union type - exactly one of the fields will be set. + * Mutations to run against a Region. */ -message MultiAction { - optional MutationProto mutation = 1; - optional Get get = 2; +message RegionMutation { + required RegionSpecifier region = 1; + // When set, run mutations as atomic unit. + optional bool atomic = 2; + repeated MutationProto mutation = 3; } /** - * An individual action result. The result will in the - * same order as the action in the request. If an action - * returns a value, it is set in value field. If it doesn't - * return anything, the result will be empty. If an action - * fails to execute due to any exception, the exception - * is returned as a stringified parameter. 
+ * Either a Result or an Exception NameBytesPair (keyed by + * exception name whose value is the exception stringified) + * or maybe empty if no result and no exception. */ -message ActionResult { - optional Result value = 1; +message ResultOrException { + optional Result result = 1; optional NameBytesPair exception = 2; } /** - * You can execute a list of actions on a given region in order. - * - * If it is a list of mutate actions, atomic can be set - * to make sure they can be processed atomically, just like - * RowMutations. + * The result of a RegionMutation. + */ +message RegionMutationResult { + repeated ResultOrException resultOrException = 1; +} + +/** + * Execute a list of actions on a given region in order. */ message MultiRequest { - required RegionSpecifier region = 1; - repeated MultiAction action = 2; - optional bool atomic = 3; + repeated RegionMutation regionMutation = 1; } message MultiResponse { - repeated ActionResult result = 1; + repeated RegionMutationResult regionMutationResult = 1; } diff --git a/hbase-protocol/src/main/protobuf/RPC.proto b/hbase-protocol/src/main/protobuf/RPC.proto index 78b09b69628..9bf69a17cbd 100644 --- a/hbase-protocol/src/main/protobuf/RPC.proto +++ b/hbase-protocol/src/main/protobuf/RPC.proto @@ -119,7 +119,9 @@ message RequestHeader { optional bool request_param = 4; // If present, then an encoded data block follows. optional CellBlockMeta cell_block_meta = 5; - // TODO: Have client specify priority + // 0 is NORMAL priority. 100 is HIGH. If no priority, treat it as NORMAL. + // See HConstants. 
+ optional uint32 priority = 6; } message ResponseHeader { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java index aa7018b4f05..6051769fae9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java @@ -83,8 +83,7 @@ class AnnotationReadingPriorityFunction implements PriorityFunction { CompactRegionRequest.class, GetRequest.class, MutateRequest.class, - ScanRequest.class, - MultiRequest.class + ScanRequest.class }; // Some caches for helping performance @@ -101,7 +100,7 @@ class AnnotationReadingPriorityFunction implements PriorityFunction { if (p != null) { // Since we protobuf'd, and then subsequently, when we went with pb style, method names // are capitalized. This meant that this brittle compare of method names gotten by - // reflection no longer matched the method names comeing in over pb. TODO: Get rid of this + // reflection no longer matched the method names coming in over pb. TODO: Get rid of this // check. For now, workaround is to capitalize the names we got from reflection so they // have chance of matching the pb ones. 
String capitalizedMethodName = capitalize(m.getName()); @@ -109,7 +108,6 @@ class AnnotationReadingPriorityFunction implements PriorityFunction { } } this.annotatedQos = qosMap; - if (methodMap.get("getRegion") == null) { methodMap.put("hasRegion", new HashMap, Method>()); methodMap.put("getRegion", new HashMap, Method>()); @@ -148,10 +146,14 @@ class AnnotationReadingPriorityFunction implements PriorityFunction { if (priorityByAnnotation != null) { return priorityByAnnotation; } - if (param == null) { return HConstants.NORMAL_QOS; } + if (methodName.equalsIgnoreCase("multi") && param instanceof MultiRequest) { + // The multi call has its priority set in the header. All calls should work this way but + // only this one has been converted so far. No priority == NORMAL_QOS. + return header.hasPriority()? header.getPriority(): HConstants.NORMAL_QOS; + } String cls = param.getClass().getName(); Class rpcArgClass = argumentToClassMap.get(cls); RegionSpecifier regionSpecifier = null; @@ -201,4 +203,4 @@ class AnnotationReadingPriorityFunction implements PriorityFunction { void setRegionServer(final HRegionServer hrs) { this.hRegionServer = hrs; } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 93a78de1e07..403b41d04dd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -73,16 +73,16 @@ import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.NotServingRegionException; -import org.apache.hadoop.hbase.RegionTooBusyException; -import org.apache.hadoop.hbase.TableName; -import 
org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.RegionTooBusyException; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.backup.HFileArchiver; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; @@ -113,11 +113,9 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.master.AssignmentManager; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index fc53c9a76d4..b29e39d21ea 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -18,8 
+18,6 @@ */ package org.apache.hadoop.hbase.regionserver; -import javax.management.ObjectName; - import java.io.IOException; import java.lang.Thread.UncaughtExceptionHandler; import java.lang.annotation.Retention; @@ -38,18 +36,20 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.UUID; import java.util.Map.Entry; import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.TreeSet; +import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.locks.ReentrantReadWriteLock; +import javax.management.ObjectName; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HealthCheckChore; @@ -90,7 +89,6 @@ import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -120,7 +118,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.ResponseConverter; import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest; @@ -138,7 +135,6 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRespon import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest; -import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState; @@ -152,8 +148,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; @@ -170,6 +166,9 @@ import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutation; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos; @@ -185,7 +184,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Regio import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest; -import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey; import org.apache.hadoop.hbase.regionserver.HRegion.Operation; import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException; import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress; @@ -3305,108 +3303,115 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa // It is also the conduit via which we pass back data. PayloadCarryingRpcController controller = (PayloadCarryingRpcController)rpcc; CellScanner cellScanner = controller != null ? controller.cellScanner(): null; - // Clear scanner so we are not holding on to reference across call. 
if (controller != null) controller.setCellScanner(null); List cellsToReturn = null; + MultiResponse.Builder responseBuilder = MultiResponse.newBuilder(); try { - HRegion region = getRegion(request.getRegion()); - MultiResponse.Builder builder = MultiResponse.newBuilder(); - List mutations = new ArrayList(request.getActionCount()); - // Do a bunch of mutations atomically. Mutations are Puts and Deletes. NOT Gets. - if (request.hasAtomic() && request.getAtomic()) { - // MultiAction is union type. Has a Get or a Mutate. - for (ClientProtos.MultiAction actionUnion : request.getActionList()) { - if (actionUnion.hasMutation()) { - mutations.add(actionUnion.getMutation()); - } else { - throw new DoNotRetryIOException("Unsupported atomic action type: " + actionUnion); - } - } - // TODO: We are not updating a metric here. Should we up requestCount? - if (!mutations.isEmpty()) mutateRows(region, mutations, cellScanner); - } else { - // Do a bunch of Actions. - ActionResult.Builder resultBuilder = null; - cellsToReturn = new ArrayList(request.getActionCount()); - for (ClientProtos.MultiAction actionUnion : request.getActionList()) { + for (RegionMutation regionMutation: request.getRegionMutationList()) { + RegionMutationResult.Builder regionMutationResultBuilder = null; + HRegion region = getRegion(regionMutation.getRegion()); + if (regionMutation.hasAtomic() && regionMutation.getAtomic()) { this.requestCount.increment(); - ClientProtos.Result result = null; - try { - if (actionUnion.hasGet()) { - Get get = ProtobufUtil.toGet(actionUnion.getGet()); - Result r = region.get(get); - if (r != null) { - // Get a result with no data. The data will be carried alongside pbs, not as pbs. - result = ProtobufUtil.toResultNoData(r); - // Add the Result to controller so it gets serialized apart from pb. Get - // Results could be big so good if they are not serialized as pb. 
- cellsToReturn.add(r); - } - } else if (actionUnion.hasMutation()) { - MutationProto mutation = actionUnion.getMutation(); - MutationType type = mutation.getMutateType(); - if (type != MutationType.PUT && type != MutationType.DELETE) { - if (!mutations.isEmpty()) { - doBatchOp(builder, region, mutations, cellScanner); - mutations.clear(); - } else if (!region.getRegionInfo().isMetaTable()) { - cacheFlusher.reclaimMemStoreMemory(); - } - } - Result r = null; - switch (type) { - case APPEND: - r = append(region, mutation, cellScanner); - break; - case INCREMENT: - r = increment(region, mutation, cellScanner); - break; - case PUT: - case DELETE: - mutations.add(mutation); - break; - default: - throw new DoNotRetryIOException("Unsupported mutate type: " + type.name()); - } - if (r != null) { - // Put the data into the cellsToReturn and the metadata about the result is all that - // we will pass back in the protobuf result. - result = ProtobufUtil.toResultNoData(r); - cellsToReturn.add(r); - } - } else { - LOG.warn("Error: invalid action: " + actionUnion + ". 
" - + "it must be a Get, Mutate, or Exec."); - throw new DoNotRetryIOException("Invalid action, " - + "it must be a Get, Mutate, or Exec."); - } - if (result != null) { - if (resultBuilder == null) { - resultBuilder = ActionResult.newBuilder(); - } else { - resultBuilder.clear(); - } - resultBuilder.setValue(result); - builder.addResult(resultBuilder.build()); - } - } catch (IOException ie) { - builder.addResult(ResponseConverter.buildActionResult(ie)); - } - } - if (!mutations.isEmpty()) { - doBatchOp(builder, region, mutations, cellScanner); + mutateRows(region, regionMutation.getMutationList(), cellScanner); + } else { + regionMutationResultBuilder = RegionMutationResult.newBuilder(); + cellsToReturn = doNonAtomicRegionMutation(region, regionMutation, cellScanner, + regionMutationResultBuilder, cellsToReturn); } + // Have one regionmutationresult per regionmutation even if it is empty so we keep results + // aligned w/ how the requests came in. + responseBuilder.addRegionMutationResult(regionMutationResultBuilder == null? + RegionMutationResult.getDefaultInstance(): regionMutationResultBuilder.build()); } // Load the controller with the Cells to return. if (cellsToReturn != null && !cellsToReturn.isEmpty() && controller != null) { controller.setCellScanner(CellUtil.createCellScanner(cellsToReturn)); } - return builder.build(); + return responseBuilder.build(); } catch (IOException ie) { throw new ServiceException(ie); } } + /** + * Run through the regionMutation rm and per Mutation, do the work, and then when + * done, add an instance of a {@link ResultOrException} that corresponds to each Mutation. + * @param region + * @param rm + * @param cellScanner + * @param builder + * @param cellsToReturn Could be null. May be allocated in this method. This is what this + * method returns as a 'result'. 
+ * @return Return the cellScanner passed + */ + private List doNonAtomicRegionMutation(final HRegion region, + final RegionMutation rm, final CellScanner cellScanner, + final RegionMutationResult.Builder builder, List cellsToReturn) { + // Gather up CONTIGUOUS Puts and Deletes in this mutations List. Idea is that rather than do + // one at a time, we instead pass them in batch. Be aware that the corresponding + // ResultOrException instance that matches each Put or Delete is then added down in the + // doBatchOp call. We should be staying aligned though the Put and Delete are deferred/batched + List mutations = null; + for (ClientProtos.MutationProto m: rm.getMutationList()) { + ClientProtos.ResultOrException resultOrException = null; + try { + Result r = null; + MutationType type = m.getMutateType(); + if (type != MutationType.PUT && type != MutationType.DELETE && mutations != null && + !mutations.isEmpty()) { + // Flush out any Puts or Deletes already collected. + doBatchOp(builder, region, mutations, cellScanner); + mutations.clear(); + } + switch (type) { + case APPEND: + r = append(region, m, cellScanner); + break; + case INCREMENT: + r = increment(region, m, cellScanner); + break; + case PUT: + case DELETE: + // Collect the individual mutations and apply in a batch + if (mutations == null) mutations = + new ArrayList(rm.getMutationCount()); + mutations.add(m); + break; + default: + throw new DoNotRetryIOException("Unsupported mutate type: " + type.name()); + } + if (r != null) { + ClientProtos.Result pbResult = null; + if (isClientCellBlockSupport()) { + pbResult = ProtobufUtil.toResultNoData(r); + // Hard to guess the size here. Just make a rough guess. + if (cellsToReturn == null) cellsToReturn = new ArrayList(256); + cellsToReturn.add(r); + } else { + pbResult = ProtobufUtil.toResult(r); + } + resultOrException = + ClientProtos.ResultOrException.newBuilder().setResult(pbResult).build(); + } + // Could get to here and there was no result and no exception. 
Presumes we added + // a Put or Delete to the collecting Mutations List for adding later. In this + // case the corresponding ResultOrException instance for the Put or Delete will be added + // down in the doBatchOp method call rather than up here. + } catch (IOException ie) { + resultOrException = ResultOrException.newBuilder(). + setException(ResponseConverter.buildException(ie)).build(); + } + if (resultOrException != null) { + builder.addResultOrException(resultOrException); + } + } + // Finish up any outstanding mutations + if (!mutations.isEmpty()) { + doBatchOp(builder, region, mutations, cellScanner); + } + return cellsToReturn; + } + // End Client methods // Start Admin methods @@ -3882,11 +3887,10 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa try { checkOpen(); List entries = request.getEntryList(); - if(entries == null || entries.isEmpty()) { + if (entries == null || entries.isEmpty()) { // empty input return ReplicateWALEntryResponse.newBuilder().build(); } - HRegion region = this.getRegionByEncodedName( entries.get(0).getKey().getEncodedRegionName().toStringUtf8()); RegionCoprocessorHost coprocessorHost = region.getCoprocessorHost(); @@ -4067,15 +4071,13 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa * @param region * @param mutations */ - protected void doBatchOp(final MultiResponse.Builder builder, final HRegion region, + protected void doBatchOp(final RegionMutationResult.Builder builder, final HRegion region, final List mutations, final CellScanner cells) { Mutation[] mArray = new Mutation[mutations.size()]; long before = EnvironmentEdgeManager.currentTimeMillis(); boolean batchContainsPuts = false, batchContainsDelete = false; + ResultOrException resultOrException = null; try { - ActionResult.Builder resultBuilder = ActionResult.newBuilder(); - resultBuilder.setValue(ClientProtos.Result.newBuilder().build()); - ActionResult result = resultBuilder.build(); int i = 0; for 
(MutationProto m : mutations) { Mutation mutation; @@ -4087,7 +4089,6 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa batchContainsDelete = true; } mArray[i++] = mutation; - builder.addResult(result); } requestCount.add(mutations.size()); @@ -4099,21 +4100,21 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa for (i = 0; i < codes.length; i++) { switch (codes[i].getOperationStatusCode()) { case BAD_FAMILY: - result = ResponseConverter.buildActionResult( + resultOrException = ResponseConverter.buildActionResult( new NoSuchColumnFamilyException(codes[i].getExceptionMsg())); - builder.setResult(i, result); + builder.setResultOrException(i, resultOrException); break; case SANITY_CHECK_FAILURE: - result = ResponseConverter.buildActionResult( + resultOrException = ResponseConverter.buildActionResult( new FailedSanityCheckException(codes[i].getExceptionMsg())); - builder.setResult(i, result); + builder.setResultOrException(i, resultOrException); break; default: - result = ResponseConverter.buildActionResult( + resultOrException = ResponseConverter.buildActionResult( new DoNotRetryIOException(codes[i].getExceptionMsg())); - builder.setResult(i, result); + builder.setResultOrException(i, resultOrException); break; case SUCCESS: @@ -4121,9 +4122,9 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa } } } catch (IOException ie) { - ActionResult result = ResponseConverter.buildActionResult(ie); + resultOrException = ResponseConverter.buildActionResult(ie); for (int i = 0; i < mutations.size(); i++) { - builder.setResult(i, result); + builder.setResultOrException(i, resultOrException); } } long after = EnvironmentEdgeManager.currentTimeMillis(); @@ -4145,8 +4146,9 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa * exceptionMessage if any * @throws IOException */ - protected OperationStatus[] doBatchOp(final HRegion region, - final List> 
mutations, boolean isReplay) throws IOException { + protected OperationStatus [] doBatchOp(final HRegion region, + final List> mutations, boolean isReplay) + throws IOException { Mutation[] mArray = new Mutation[mutations.size()]; long before = EnvironmentEdgeManager.currentTimeMillis(); boolean batchContainsPuts = false, batchContainsDelete = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java index 4368a179fa0..a5415e4a722 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java @@ -33,16 +33,14 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Action; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.RegionServerCallable; import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.RpcRetryingCaller; import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -53,10 +51,10 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryR import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; -import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionMutationResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; @@ -118,7 +116,7 @@ public class WALEditsReplaySink { HRegionLocation loc = null; HLog.Entry entry = null; List regionEntries = null; - // Build the action list. + // Build the action list. for (int i = 0; i < batchSize; i++) { loc = entries.get(i).getFirst(); entry = entries.get(i).getSecond(); @@ -130,7 +128,7 @@ public class WALEditsReplaySink { } regionEntries.add(entry); } - + long startTime = EnvironmentEdgeManager.currentTimeMillis(); // replaying edits by region @@ -143,7 +141,7 @@ public class WALEditsReplaySink { for (; replayedActions < totalActions;) { curBatchSize = (totalActions > (MAX_BATCH_SIZE + replayedActions)) ? 
MAX_BATCH_SIZE : (totalActions - replayedActions); - replayEdits(loc, curRegion, allActions.subList(replayedActions, + replayEdits(loc, curRegion, allActions.subList(replayedActions, replayedActions + curBatchSize)); replayedActions += curBatchSize; } @@ -185,7 +183,7 @@ public class WALEditsReplaySink { } } } - + /** * Callable that handles the replay method call going against a single regionserver * @param @@ -202,7 +200,7 @@ public class WALEditsReplaySink { this.regionInfo = regionInfo; setLocation(regionLoc); } - + @Override public ReplicateWALEntryResponse call() throws IOException { try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java index bc8ede7c730..873e3ca8de8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java @@ -20,8 +20,8 @@ import static org.junit.Assert.assertEquals; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; -import org.apache.hadoop.hbase.util.Pair; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; @@ -43,11 +43,19 @@ public class TestQosFunction { checkMethod("ReplicateWALEntry", HConstants.REPLICATION_QOS, qosFunction); // Set method name in pb style with the method name capitalized. checkMethod("OpenRegion", HConstants.HIGH_QOS, qosFunction); + // Check multi works. 
+ checkMethod("Multi", HConstants.NORMAL_QOS, qosFunction, MultiRequest.getDefaultInstance()); } - private void checkMethod(final String methodName, final int expected, final AnnotationReadingPriorityFunction qosf) { + private void checkMethod(final String methodName, final int expected, + final AnnotationReadingPriorityFunction qosf) { + checkMethod(methodName, expected, qosf, null); + } + + private void checkMethod(final String methodName, final int expected, + final AnnotationReadingPriorityFunction qosf, final Message param) { RequestHeader.Builder builder = RequestHeader.newBuilder(); builder.setMethodName(methodName); - assertEquals(methodName, expected, qosf.getPriority(builder.build(), null)); + assertEquals(methodName, expected, qosf.getPriority(builder.build(), param)); } } \ No newline at end of file