From 4d04f853083a2074bb538f87d9bd9938df358c7d Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Fri, 13 Apr 2012 20:28:21 +0000 Subject: [PATCH] HBASE-5443 Convert the client protocol of HRegionInterface to PB git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1325937 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop/hbase/catalog/MetaReader.java | 89 - .../hadoop/hbase/client/ClientScanner.java | 1 + .../hadoop/hbase/client/HBaseAdmin.java | 58 +- .../hadoop/hbase/client/HConnection.java | 14 + .../hbase/client/HConnectionManager.java | 189 +- .../apache/hadoop/hbase/client/HTable.java | 152 +- .../hadoop/hbase/client/ScannerCallable.java | 124 +- .../hadoop/hbase/client/ServerCallable.java | 10 +- .../hadoop/hbase/filter/ParseConstants.java | 7 +- .../hadoop/hbase/io/HbaseObjectWritable.java | 7 +- .../org/apache/hadoop/hbase/io/TimeRange.java | 8 + .../hadoop/hbase/ipc/ExecRPCInvoker.java | 22 +- .../apache/hadoop/hbase/ipc/Invocation.java | 49 +- .../hadoop/hbase/ipc/WritableRpcEngine.java | 6 + .../mapreduce/LoadIncrementalHFiles.java | 11 +- .../hadoop/hbase/protobuf/AdminProtocol.java | 37 + .../hadoop/hbase/protobuf/ClientProtocol.java | 39 + .../hadoop/hbase/protobuf/ProtobufUtil.java | 667 +- .../hbase/protobuf/RequestConverter.java | 782 ++ .../hbase/protobuf/ResponseConverter.java | 187 + ...egionAdminProtos.java => AdminProtos.java} | 3316 ++++--- ...ionClientProtos.java => ClientProtos.java} | 8059 ++++++++--------- .../hbase/protobuf/generated/HBaseProtos.java | 1051 ++- .../hbase/regionserver/HRegionServer.java | 312 +- .../regionserver/HRegionThriftServer.java | 24 +- .../hadoop/hbase/regionserver/Leases.java | 2 +- .../hbase/regionserver/RegionServer.java | 1164 +++ .../{RegionAdmin.proto => Admin.proto} | 8 +- .../{RegionClient.proto => Client.proto} | 118 +- src/main/protobuf/hbase.proto | 13 + .../hbase/catalog/TestCatalogTracker.java | 46 +- .../TestMetaReaderEditorNoCluster.java | 58 +- .../client/HConnectionTestingUtility.java | 9 +- .../hbase/io/TestHbaseObjectWritable.java | 4 +- .../hadoop/hbase/master/MockRegionServer.java | 110 +- .../hbase/master/TestAssignmentManager.java | 62 +- .../hbase/master/TestCatalogJanitor.java | 19 +- .../hbase/master/TestMasterNoCluster.java | 4 +- .../hbase/regionserver/OOMERegionServer.java | 19 +- .../TestHRegionServerBulkLoad.java | 9 +- 40 files changed, 9874 insertions(+), 6992 deletions(-) create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java rename src/main/java/org/apache/hadoop/hbase/protobuf/generated/{RegionAdminProtos.java => AdminProtos.java} (74%) rename src/main/java/org/apache/hadoop/hbase/protobuf/generated/{RegionClientProtos.java => ClientProtos.java} (69%) create mode 100644 src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java rename src/main/protobuf/{RegionAdmin.proto => Admin.proto} (96%) rename src/main/protobuf/{RegionClient.proto => Client.proto} (78%) diff --git a/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java b/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java index 0129ee94b5a..238740a2688 100644 --- a/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java +++ b/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java @@ -31,18 +31,15 @@ import 
org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Writables; -import org.apache.hadoop.ipc.RemoteException; /** * Reads region and assignment information from .META.. @@ -252,19 +249,6 @@ public class MetaReader { } } - /** - * Reads the location of META from ROOT. - * @param metaServer connection to server hosting ROOT - * @return location of META in ROOT where location, or null if not available - * @throws IOException - * @deprecated Does not retry; use #getMetaRegionLocation(CatalogTracker) - */ - public static ServerName readMetaLocation(HRegionInterface metaServer) - throws IOException { - return readLocation(metaServer, CatalogTracker.ROOT_REGION_NAME, - CatalogTracker.META_REGION_NAME); - } - /** * Gets the location of .META. region by reading content of * -ROOT-. @@ -292,50 +276,6 @@ public class MetaReader { return (pair == null || pair.getSecond() == null)? null: pair.getSecond(); } - // TODO: Remove when deprecated dependencies are removed. - private static ServerName readLocation(HRegionInterface metaServer, - byte [] catalogRegionName, byte [] regionName) - throws IOException { - Result r = null; - try { - r = metaServer.get(catalogRegionName, - new Get(regionName). - addColumn(HConstants.CATALOG_FAMILY, - HConstants.SERVER_QUALIFIER). - addColumn(HConstants.CATALOG_FAMILY, - HConstants.STARTCODE_QUALIFIER)); - } catch (java.net.SocketTimeoutException e) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } catch (java.net.SocketException e) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } catch (RemoteException re) { - IOException ioe = re.unwrapRemoteException(); - if (ioe instanceof NotServingRegionException) { - // Treat this NSRE as unavailable table. Catch and fall through to - // return null below - } else if (ioe.getMessage().contains("Server not running")) { - // Treat as unavailable table. - } else { - throw re; - } - } catch (IOException e) { - if (e.getCause() != null && e.getCause() instanceof IOException && - e.getCause().getMessage() != null && - e.getCause().getMessage().contains("Connection reset by peer")) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } else { - throw e; - } - } - if (r == null || r.isEmpty()) { - return null; - } - return getServerNameFromCatalogResult(r); - } - /** * Gets the region info and assignment for the specified region. * @param catalogTracker @@ -654,35 +594,6 @@ public class MetaReader { fullScan(catalogTracker, v); } - /** - * Fully scan a given region, on a given server starting with given row. 
- * @param hRegionInterface region server - * @param visitor visitor - * @param regionName name of region - * @param startrow start row - * @throws IOException - * @deprecated Does not retry; use fullScan xxx instead. - x - */ - public static void fullScan(HRegionInterface hRegionInterface, - Visitor visitor, final byte[] regionName, - byte[] startrow) throws IOException { - if (hRegionInterface == null) return; - Scan scan = new Scan(); - if (startrow != null) scan.setStartRow(startrow); - scan.addFamily(HConstants.CATALOG_FAMILY); - long scannerid = hRegionInterface.openScanner(regionName, scan); - try { - Result data; - while((data = hRegionInterface.next(scannerid)) != null) { - if (!data.isEmpty()) visitor.visit(data); - } - } finally { - hRegionInterface.close(scannerid); - } - return; - } - /** * Performs a full scan of a catalog table. * @param catalogTracker diff --git a/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 3167f233cf8..2afdb068cc7 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ b/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -307,6 +307,7 @@ public class ClientScanner extends AbstractClientScanner { } // Clear region this.currentRegion = null; + callable = null; continue; } long currentTime = System.currentTimeMillis(); diff --git a/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 16e401797d8..ee16e720d32 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -19,6 +19,17 @@ */ package org.apache.hadoop.hbase.client; +import java.io.Closeable; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.net.SocketTimeoutException; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Pattern; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -45,6 +56,12 @@ import org.apache.hadoop.hbase.catalog.MetaReader; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -54,16 +71,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.KeeperException; -import java.io.Closeable; -import java.io.IOException; -import java.io.InterruptedIOException; -import java.net.SocketTimeoutException; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Pattern; 
+import com.google.protobuf.ServiceException; /** * Provides an interface to manage HBase database table metadata + general @@ -497,23 +505,27 @@ public class HBaseAdmin implements Abortable, Closeable { }); // Wait until all regions deleted - HRegionInterface server = - connection.getHRegionConnection(firstMetaServer.getHostname(), firstMetaServer.getPort()); + ClientProtocol server = + connection.getClient(firstMetaServer.getHostname(), firstMetaServer.getPort()); for (int tries = 0; tries < (this.numRetries * this.retryLongerMultiplier); tries++) { - long scannerId = -1L; try { Scan scan = MetaReader.getScanForTableName(tableName); - scan.addColumn(HConstants.CATALOG_FAMILY, - HConstants.REGIONINFO_QUALIFIER); - scannerId = server.openScanner( - firstMetaServer.getRegionInfo().getRegionName(), scan); + scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); + ScanRequest request = RequestConverter.buildScanRequest( + firstMetaServer.getRegionInfo().getRegionName(), scan, 1, true); + Result[] values = null; // Get a batch at a time. - Result values = server.next(scannerId); + try { + ScanResponse response = server.scan(null, request); + values = ResponseConverter.getResults(response); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } // let us wait until .META. table is updated and // HMaster removes the table from its HTableDescriptors - if (values == null) { + if (values == null || values.length == 0) { boolean tableExists = false; HTableDescriptor[] htds; MasterKeepAliveConnection master = connection.getKeepAliveMaster(); @@ -542,14 +554,6 @@ public class HBaseAdmin implements Abortable, Closeable { throw ex; } } - } finally { - if (scannerId != -1L) { - try { - server.close(scannerId); - } catch (IOException ex) { - LOG.warn(ex); - } - } } try { Thread.sleep(getPauseTime(tries)); diff --git a/src/main/java/org/apache/hadoop/hbase/client/HConnection.java b/src/main/java/org/apache/hadoop/hbase/client/HConnection.java index 5d43086663b..23f8e5ac8e0 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/HConnection.java +++ b/src/main/java/org/apache/hadoop/hbase/client/HConnection.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; /** @@ -220,6 +221,19 @@ public interface HConnection extends Abortable, Closeable { public HRegionInterface getHRegionConnection(final String hostname, final int port) throws IOException; + /** + * Establishes a connection to the region server at the specified address, and return + * a region client protocol. + * + * @param hostname RegionServer hostname + * @param port RegionServer port + * @return ClientProtocol proxy for RegionServer + * @throws IOException if a remote or network exception occurs + * + */ + public ClientProtocol getClient(final String hostname, final int port) + throws IOException; + /** * Establishes a connection to the region server at the specified address. 
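Every call site converted in this patch follows the same shape: build a protobuf request through RequestConverter, invoke the blocking stub with a null RpcController, and unwrap any ServiceException back into the IOException the old HRegionInterface methods threw. A minimal sketch of that idiom, assuming only the helpers this patch introduces (the region name and Get are placeholders):

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.protobuf.ClientProtocol;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.RequestConverter;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;

    import com.google.protobuf.ServiceException;

    public class PbCallIdiom {
      /** Fetch a row over the PB client protocol, restoring IOException semantics. */
      static Result pbGet(ClientProtocol client, byte[] regionName, Get get)
          throws IOException {
        GetRequest request = RequestConverter.buildGetRequest(regionName, get);
        try {
          // The RpcController argument is unused by the blocking client, hence null.
          GetResponse response = client.get(null, request);
          return ProtobufUtil.toResult(response.getResult());
        } catch (ServiceException se) {
          // Recover the IOException the server side actually threw.
          throw ProtobufUtil.getRemoteException(se);
        }
      }
    }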
* @param regionServer - the server to connect to diff --git a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java index 0b783ced050..820e2a90499 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java +++ b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java @@ -19,6 +19,33 @@ */ package org.apache.hadoop.hbase.client; +import java.io.Closeable; +import java.io.IOException; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.lang.reflect.UndeclaredThrowableException; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -46,6 +73,13 @@ import org.apache.hadoop.hbase.ipc.ExecRPCInvoker; import org.apache.hadoop.hbase.ipc.HBaseRPC; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -61,32 +95,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.ipc.RemoteException; import org.apache.zookeeper.KeeperException; -import java.io.Closeable; -import java.io.IOException; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Proxy; -import java.lang.reflect.UndeclaredThrowableException; -import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.TreeMap; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; +import com.google.protobuf.ServiceException; /** * A non-instantiable class that 
manages {@link HConnection}s. @@ -146,6 +155,12 @@ public class HConnectionManager { public static final int MAX_CACHED_HBASE_INSTANCES; + /** Parameter name for what client protocol to use. */ + public static final String CLIENT_PROTOCOL_CLASS = "hbase.clientprotocol.class"; + + /** Default client protocol class name. */ + public static final String DEFAULT_CLIENT_PROTOCOL_CLASS = ClientProtocol.class.getName(); + private static final Log LOG = LogFactory.getLog(HConnectionManager.class); static { @@ -493,6 +508,7 @@ public class HConnectionManager { static class HConnectionImplementation implements HConnection, Closeable { static final Log LOG = LogFactory.getLog(HConnectionImplementation.class); private final Class serverInterfaceClass; + private final Class clientClass; private final long pause; private final int numRetries; private final int maxRPCAttempts; @@ -521,8 +537,8 @@ public class HConnectionManager { private final Configuration conf; // Known region HServerAddress.toString() -> HRegionInterface - private final Map servers = - new ConcurrentHashMap(); + private final ConcurrentHashMap> servers = + new ConcurrentHashMap>(); private final ConcurrentHashMap connectionLock = new ConcurrentHashMap(); @@ -570,6 +586,15 @@ public class HConnectionManager { throw new UnsupportedOperationException( "Unable to find region server interface " + serverClassName, e); } + String clientClassName = conf.get(CLIENT_PROTOCOL_CLASS, + DEFAULT_CLIENT_PROTOCOL_CLASS); + try { + this.clientClass = + (Class) Class.forName(clientClassName); + } catch (ClassNotFoundException e) { + throw new UnsupportedOperationException( + "Unable to find client protocol " + clientClassName, e); + } this.pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE, HConstants.DEFAULT_HBASE_CLIENT_PAUSE); this.numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, @@ -1328,19 +1353,29 @@ public class HConnectionManager { return getHRegionConnection(hostname, port, false); } + @Override + public ClientProtocol getClient( + final String hostname, final int port) throws IOException { + return (ClientProtocol)getProtocol(hostname, port, + clientClass, ClientProtocol.VERSION); + } + @Override @Deprecated public HRegionInterface getHRegionConnection(HServerAddress hsa, boolean master) throws IOException { - return getHRegionConnection(null, -1, hsa.getInetSocketAddress(), master); + String hostname = hsa.getInetSocketAddress().getHostName(); + int port = hsa.getInetSocketAddress().getPort(); + return getHRegionConnection(hostname, port, master); } @Override public HRegionInterface getHRegionConnection(final String hostname, final int port, final boolean master) throws IOException { - return getHRegionConnection(hostname, port, null, master); + return (HRegionInterface)getProtocol(hostname, port, + serverInterfaceClass, HRegionInterface.VERSION); } /** @@ -1348,43 +1383,44 @@ public class HConnectionManager { * can be but not both. * @param hostname * @param port - * @param isa - * @param master + * @param protocolClass + * @param version * @return Proxy. 
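The connection now resolves its client protocol class from configuration (hbase.clientprotocol.class), parallel to the existing hbase.regionserver.class mechanism for HRegionInterface. A sketch of that lookup as a standalone helper, built on the constants added above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.protobuf.ClientProtocol;

    public class ClientProtocolResolver {
      /** Resolve the pluggable client protocol class, defaulting to the PB-based one. */
      static Class<? extends ClientProtocol> resolve(Configuration conf) {
        String name = conf.get("hbase.clientprotocol.class",
            ClientProtocol.class.getName());
        try {
          return Class.forName(name).asSubclass(ClientProtocol.class);
        } catch (ClassNotFoundException e) {
          // Mirrors how HConnectionImplementation reports an unknown class.
          throw new UnsupportedOperationException(
              "Unable to find client protocol " + name, e);
        }
      }
    }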
* @throws IOException */ - HRegionInterface getHRegionConnection(final String hostname, final int port, - final InetSocketAddress isa, final boolean master) - throws IOException { - HRegionInterface server; - String rsName = null; - if (isa != null) { - rsName = Addressing.createHostAndPortStr(isa.getHostName(), - isa.getPort()); - } else { - rsName = Addressing.createHostAndPortStr(hostname, port); - } + VersionedProtocol getProtocol(final String hostname, + final int port, final Class protocolClass, + final long version) throws IOException { + String rsName = Addressing.createHostAndPortStr(hostname, port); // See if we already have a connection (common case) - server = this.servers.get(rsName); + Map protocols = this.servers.get(rsName); + if (protocols == null) { + protocols = new HashMap(); + Map existingProtocols = + this.servers.putIfAbsent(rsName, protocols); + if (existingProtocols != null) { + protocols = existingProtocols; + } + } + String protocol = protocolClass.getName(); + VersionedProtocol server = protocols.get(protocol); if (server == null) { - // create a unique lock for this RS (if necessary) - this.connectionLock.putIfAbsent(rsName, rsName); + // create a unique lock for this RS + protocol (if necessary) + String lockKey = protocol + "@" + rsName; + this.connectionLock.putIfAbsent(lockKey, lockKey); // get the RS lock - synchronized (this.connectionLock.get(rsName)) { + synchronized (this.connectionLock.get(lockKey)) { // do one more lookup in case we were stalled above - server = this.servers.get(rsName); + server = protocols.get(protocol); if (server == null) { try { // Only create isa when we need to. - InetSocketAddress address = isa != null? isa: - new InetSocketAddress(hostname, port); + InetSocketAddress address = new InetSocketAddress(hostname, port); // definitely a cache miss. establish an RPC for this RS - server = (HRegionInterface) HBaseRPC.waitForProxy( - serverInterfaceClass, HRegionInterface.VERSION, - address, this.conf, + server = HBaseRPC.waitForProxy( + protocolClass, version, address, this.conf, this.maxRPCAttempts, this.rpcTimeout, this.rpcTimeout); - this.servers.put(Addressing.createHostAndPortStr( - address.getHostName(), address.getPort()), server); + protocols.put(protocol, server); } catch (RemoteException e) { LOG.warn("RemoteException connecting to RS", e); // Throw what the RemoteException was carrying. 
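The proxy cache in getProtocol above is two-level: host:port maps to a per-protocol map, and the creation lock is keyed protocol@host:port so proxies for different protocols to the same server can be built independently. A simplified sketch of that caching discipline, with a hypothetical ProxyFactory standing in for HBaseRPC.waitForProxy:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class ProtocolCache {
      private final ConcurrentHashMap<String, Map<String, Object>> servers =
          new ConcurrentHashMap<String, Map<String, Object>>();
      private final ConcurrentHashMap<String, String> locks =
          new ConcurrentHashMap<String, String>();

      Object getProtocol(String hostAndPort, String protocol, ProxyFactory factory) {
        Map<String, Object> protocols = servers.get(hostAndPort);
        if (protocols == null) {
          protocols = new HashMap<String, Object>();
          Map<String, Object> existing = servers.putIfAbsent(hostAndPort, protocols);
          if (existing != null) protocols = existing; // lost the race, reuse theirs
        }
        Object proxy = protocols.get(protocol);
        if (proxy == null) {
          String lockKey = protocol + "@" + hostAndPort;
          locks.putIfAbsent(lockKey, lockKey);        // one canonical lock per key
          synchronized (locks.get(lockKey)) {
            proxy = protocols.get(protocol);          // re-check in case we stalled
            if (proxy == null) {
              proxy = factory.create(hostAndPort, protocol);
              protocols.put(protocol, proxy);
            }
          }
        }
        return proxy;
      }

      interface ProxyFactory { Object create(String hostAndPort, String protocol); }
    }

Caching by protocol name is what lets an HRegionInterface proxy and a ClientProtocol proxy to the same region server coexist; the cost is that teardown must now walk the nested maps, as the updated close() path later in this file does.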
@@ -1679,11 +1715,42 @@ public class HConnectionManager { ServerCallable callable = new ServerCallable(connection, tableName, null) { public MultiResponse call() throws IOException { - return server.multi(multi); + try { + MultiResponse response = new MultiResponse(); + for (Map.Entry>> e: multi.actions.entrySet()) { + byte[] regionName = e.getKey(); + int rowMutations = 0; + List> actions = e.getValue(); + for (Action action: actions) { + Row row = action.getAction(); + if (row instanceof RowMutations) { + MultiRequest request = + RequestConverter.buildMultiRequest(regionName, (RowMutations)row); + server.multi(null, request); + response.add(regionName, action.getOriginalIndex(), new Result()); + rowMutations++; + } + } + if (actions.size() > rowMutations) { + MultiRequest request = + RequestConverter.buildMultiRequest(regionName, actions); + ClientProtos.MultiResponse + proto = server.multi(null, request); + List results = ResponseConverter.getResults(proto); + for (int i = 0, n = results.size(); i < n; i++) { + int originalIndex = actions.get(i).getOriginalIndex(); + response.add(regionName, originalIndex, results.get(i)); + } + } + } + return response; + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } @Override public void connect(boolean reload) throws IOException { - server = connection.getHRegionConnection( + server = connection.getClient( loc.getHostname(), loc.getPort()); } }; @@ -1885,7 +1952,7 @@ public class HConnectionManager { } } } catch (ExecutionException e) { - LOG.warn("Failed all from " + loc, e); + LOG.debug("Failed all from " + loc, e); } } @@ -2077,8 +2144,10 @@ public class HConnectionManager { delayedClosing.stop("Closing connection"); if (stopProxy) { closeMaster(); - for (HRegionInterface i : servers.values()) { - HBaseRPC.stopProxy(i); + for (Map i : servers.values()) { + for (VersionedProtocol server: i.values()) { + HBaseRPC.stopProxy(server); + } } } closeZooKeeperWatcher(); diff --git a/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/src/main/java/org/apache/hadoop/hbase/client/HTable.java index aa7652f6a07..2c87d50d429 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -53,13 +53,27 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.ExecRPCInvoker; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; import 
org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Writables; +import com.google.protobuf.ServiceException; + /** *

Used to communicate with a single HBase table. * @@ -648,8 +662,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Result call() throws IOException { - return server.getClosestRowBefore(location.getRegionInfo().getRegionName(), - row, family); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), row, family, true); + GetResponse response = server.get(null, request); + if (!response.hasResult()) return null; + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -694,7 +715,14 @@ public class HTable implements HTableInterface { public Result get(final Get get) throws IOException { return new ServerCallable(connection, tableName, get.getRow(), operationTimeout) { public Result call() throws IOException { - return server.get(location.getRegionInfo().getRegionName(), get); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), get); + GetResponse response = server.get(null, request); + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -746,13 +774,18 @@ public class HTable implements HTableInterface { @Override public void delete(final Delete delete) throws IOException { - new ServerCallable(connection, tableName, delete.getRow(), - operationTimeout) { - public Void call() throws IOException { - server.delete(location.getRegionInfo().getRegionName(), delete); - return null; - } - }.withRetries(); + new ServerCallable(connection, tableName, delete.getRow(), operationTimeout) { + public Boolean call() throws IOException { + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), delete); + MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + } + }.withRetries(); } /** @@ -821,7 +854,13 @@ public class HTable implements HTableInterface { new ServerCallable(connection, tableName, rm.getRow(), operationTimeout) { public Void call() throws IOException { - server.mutateRow(location.getRegionInfo().getRegionName(), rm); + try { + MultiRequest request = RequestConverter.buildMultiRequest( + location.getRegionInfo().getRegionName(), rm); + server.multi(null, request); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } return null; } }.withRetries(); @@ -838,8 +877,15 @@ public class HTable implements HTableInterface { } return new ServerCallable(connection, tableName, append.getRow(), operationTimeout) { public Result call() throws IOException { - return server.append( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), append); + MutateResponse response = server.mutate(null, request); + if (!response.hasResult()) return null; + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -855,8 +901,14 @@ public class HTable implements HTableInterface { } return new ServerCallable(connection, tableName, increment.getRow(), operationTimeout) { public Result call() throws IOException { - return 
server.increment( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), increment); + MutateResponse response = server.mutate(null, request); + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -890,9 +942,16 @@ public class HTable implements HTableInterface { } return new ServerCallable(connection, tableName, row, operationTimeout) { public Long call() throws IOException { - return server.incrementColumnValue( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), row, family, qualifier, amount, writeToWAL); + MutateResponse response = server.mutate(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); + return Long.valueOf(Bytes.toLong(result.getValue(family, qualifier))); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -907,8 +966,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Boolean call() throws IOException { - return server.checkAndPut(location.getRegionInfo().getRegionName(), - row, family, qualifier, value, put) ? Boolean.TRUE : Boolean.FALSE; + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), row, family, qualifier, + new BinaryComparator(value), CompareType.EQUAL, put); + MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -924,10 +990,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Boolean call() throws IOException { - return server.checkAndDelete( - location.getRegionInfo().getRegionName(), - row, family, qualifier, value, delete) - ? Boolean.TRUE : Boolean.FALSE; + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), row, family, qualifier, + new BinaryComparator(value), CompareType.EQUAL, delete); + MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -939,8 +1010,14 @@ public class HTable implements HTableInterface { public boolean exists(final Get get) throws IOException { return new ServerCallable(connection, tableName, get.getRow(), operationTimeout) { public Boolean call() throws IOException { - return server. 
- exists(location.getRegionInfo().getRegionName(), get); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), get, true); + GetResponse response = server.get(null, request); + return response.getExists(); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -1026,9 +1103,14 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public RowLock call() throws IOException { - long lockId = - server.lockRow(location.getRegionInfo().getRegionName(), row); - return new RowLock(row,lockId); + try { + LockRowRequest request = RequestConverter.buildLockRowRequest( + location.getRegionInfo().getRegionName(), row); + LockRowResponse response = server.lockRow(null, request); + return new RowLock(row, response.getLockId()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -1039,14 +1121,18 @@ public class HTable implements HTableInterface { @Override public void unlockRow(final RowLock rl) throws IOException { - new ServerCallable(connection, tableName, rl.getRow(), - operationTimeout) { - public Void call() throws IOException { - server.unlockRow(location.getRegionInfo().getRegionName(), rl - .getLockId()); - return null; - } - }.withRetries(); + new ServerCallable(connection, tableName, rl.getRow(), operationTimeout) { + public Boolean call() throws IOException { + try { + UnlockRowRequest request = RequestConverter.buildUnlockRowRequest( + location.getRegionInfo().getRegionName(), rl.getLockId()); + server.unlockRow(null, request); + return Boolean.TRUE; + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + } + }.withRetries(); } /** diff --git a/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 9903df39451..fe80fcf424f 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -25,15 +25,22 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RemoteExceptionHandler; +import org.apache.hadoop.hbase.client.metrics.ScanMetrics; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; +import com.google.protobuf.ServiceException; + /** * Retries scanner operations such as create, next, etc. * Used by {@link ResultScanner}s made by {@link HTable}. 
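In the HTable conversions above, checkAndPut and checkAndDelete lose their dedicated RPCs: each becomes a MutateRequest carrying a condition (row, family, qualifier, a BinaryComparator on the expected value, CompareType.EQUAL), with the boolean outcome read back from getProcessed(). A sketch of the checkAndPut path under those converter signatures:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.protobuf.ClientProtocol;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.RequestConverter;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;

    import com.google.protobuf.ServiceException;

    public class CheckAndPutSketch {
      static boolean checkAndPut(ClientProtocol client, byte[] regionName,
          byte[] row, byte[] family, byte[] qualifier, byte[] expected, Put put)
          throws IOException {
        // The condition and the mutation travel in one request.
        MutateRequest request = RequestConverter.buildMutateRequest(regionName,
            row, family, qualifier, new BinaryComparator(expected),
            CompareType.EQUAL, put);
        try {
          MutateResponse response = client.mutate(null, request);
          return response.getProcessed(); // true iff the condition matched
        } catch (ServiceException se) {
          throw ProtobufUtil.getRemoteException(se);
        }
      }
    }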
@@ -107,41 +114,58 @@ public class ScannerCallable extends ServerCallable { * @see java.util.concurrent.Callable#call() */ public Result [] call() throws IOException { - if (scannerId != -1L && closed) { - close(); - } else if (scannerId == -1L && !closed) { - this.scannerId = openScanner(); - } else { - Result [] rrs = null; - try { - incRPCcallsMetrics(); - rrs = server.next(scannerId, caching); - updateResultsMetrics(rrs); - } catch (IOException e) { - IOException ioe = null; - if (e instanceof RemoteException) { - ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e); - } - if (ioe == null) throw new IOException(e); - if (ioe instanceof NotServingRegionException) { - // Throw a DNRE so that we break out of cycle of calling NSRE - // when what we need is to open scanner against new location. - // Attach NSRE to signal client that it needs to resetup scanner. - if (this.scanMetrics != null) { - this.scanMetrics.countOfNSRE.inc(); - } - throw new DoNotRetryIOException("Reset scanner", ioe); - } else if (ioe instanceof RegionServerStoppedException) { - // Throw a DNRE so that we break out of cycle of calling RSSE - // when what we need is to open scanner against new location. - // Attach RSSE to signal client that it needs to resetup scanner. - throw new DoNotRetryIOException("Reset scanner", ioe); - } else { - // The outer layers will retry - throw ioe; - } + if (closed) { + if (scannerId != -1) { + close(); + } + } else { + if (scannerId == -1L) { + this.scannerId = openScanner(); + } else { + Result [] rrs = null; + try { + incRPCcallsMetrics(); + ScanRequest request = + RequestConverter.buildScanRequest(scannerId, caching, false); + try { + ScanResponse response = server.scan(null, request); + rrs = ResponseConverter.getResults(response); + if (response.hasMoreResults() + && !response.getMoreResults()) { + scannerId = -1L; + closed = true; + return null; + } + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + updateResultsMetrics(rrs); + } catch (IOException e) { + IOException ioe = null; + if (e instanceof RemoteException) { + ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e); + } + if (ioe == null) throw new IOException(e); + if (ioe instanceof NotServingRegionException) { + // Throw a DNRE so that we break out of cycle of calling NSRE + // when what we need is to open scanner against new location. + // Attach NSRE to signal client that it needs to resetup scanner. + if (this.scanMetrics != null) { + this.scanMetrics.countOfNSRE.inc(); + } + throw new DoNotRetryIOException("Reset scanner", ioe); + } else if (ioe instanceof RegionServerStoppedException) { + // Throw a DNRE so that we break out of cycle of calling RSSE + // when what we need is to open scanner against new location. + // Attach RSSE to signal client that it needs to resetup scanner. 
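The three scanner RPCs (openScanner, next, close) collapse into a single scan() call distinguished by the ScanRequest contents: region name plus Scan opens a scanner, a scanner id fetches the next batch, and the closeScanner flag tears it down. A condensed sketch of the lifecycle ScannerCallable now implements, assuming the two buildScanRequest overloads used above:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.protobuf.ClientProtocol;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.RequestConverter;
    import org.apache.hadoop.hbase.protobuf.ResponseConverter;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;

    import com.google.protobuf.ServiceException;

    public class ScanLifecycleSketch {
      static void scanRegion(ClientProtocol client, byte[] regionName,
          Scan scan, int caching) throws IOException {
        try {
          // Open: region name + Scan, zero rows requested, no close flag.
          ScanResponse open = client.scan(null,
              RequestConverter.buildScanRequest(regionName, scan, 0, false));
          long scannerId = open.getScannerId();
          boolean exhausted = false;
          try {
            while (!exhausted) {
              // Next: scanner id + batch size.
              ScanResponse next = client.scan(null,
                  RequestConverter.buildScanRequest(scannerId, caching, false));
              if (next.hasMoreResults() && !next.getMoreResults()) {
                // Server signaled exhaustion; ScannerCallable drops the id
                // without a close call in this case, and so do we.
                exhausted = true;
                continue;
              }
              Result[] results = ResponseConverter.getResults(next);
              if (results == null || results.length == 0) break;
              // ... consume results ...
            }
          } finally {
            if (!exhausted) {
              // Close: scanner id, zero rows, closeScanner = true.
              client.scan(null,
                  RequestConverter.buildScanRequest(scannerId, 0, true));
            }
          }
        } catch (ServiceException se) {
          throw ProtobufUtil.getRemoteException(se);
        }
      }
    }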
+ throw new DoNotRetryIOException("Reset scanner", ioe); + } else { + // The outer layers will retry + throw ioe; + } + } + return rrs; } - return rrs; } return null; } @@ -161,10 +185,12 @@ public class ScannerCallable extends ServerCallable { return; } for (Result rr : rrs) { - this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength()); - if (isRegionServerRemote) { - this.scanMetrics.countOfBytesInRemoteResults.inc( - rr.getBytes().getLength()); + if (rr.getBytes() != null) { + this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength()); + if (isRegionServerRemote) { + this.scanMetrics.countOfBytesInRemoteResults.inc( + rr.getBytes().getLength()); + } } } } @@ -175,7 +201,13 @@ public class ScannerCallable extends ServerCallable { } try { incRPCcallsMetrics(); - this.server.close(this.scannerId); + ScanRequest request = + RequestConverter.buildScanRequest(this.scannerId, 0, true); + try { + server.scan(null, request); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } catch (IOException e) { LOG.warn("Ignore, probably already closed", e); } @@ -184,8 +216,16 @@ public class ScannerCallable extends ServerCallable { protected long openScanner() throws IOException { incRPCcallsMetrics(); - return this.server.openScanner(this.location.getRegionInfo().getRegionName(), - this.scan); + ScanRequest request = + RequestConverter.buildScanRequest( + this.location.getRegionInfo().getRegionName(), + this.scan, 0, false); + try { + ScanResponse response = server.scan(null, request); + return response.getScannerId(); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } protected Scan getScan() { diff --git a/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java b/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java index ddcf9ad6dcd..2a9d86ed5d5 100644 --- a/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java +++ b/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ipc.HBaseRPC; -import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.ipc.RemoteException; @@ -57,7 +57,7 @@ public abstract class ServerCallable implements Callable { protected final byte [] tableName; protected final byte [] row; protected HRegionLocation location; - protected HRegionInterface server; + protected ClientProtocol server; protected int callTimeout; protected long startTime, endTime; @@ -84,8 +84,8 @@ public abstract class ServerCallable implements Callable { */ public void connect(final boolean reload) throws IOException { this.location = connection.getRegionLocation(tableName, row, reload); - this.server = connection.getHRegionConnection(location.getHostname(), - location.getPort()); + this.server = connection.getClient(location.getHostname(), + location.getPort()); } /** @return the server name @@ -224,7 +224,7 @@ public abstract class ServerCallable implements Callable { } } - private static Throwable translateException(Throwable t) throws IOException { + protected static Throwable translateException(Throwable t) throws IOException { if (t instanceof UndeclaredThrowableException) { t = t.getCause(); } diff --git 
a/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java b/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java index 1acbdaba0ad..a179bf31c05 100644 --- a/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java +++ b/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java @@ -19,13 +19,10 @@ */ package org.apache.hadoop.hbase.filter; +import java.nio.ByteBuffer; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hbase.ipc.HRegionInterface; -import org.apache.hadoop.hbase.util.Bytes; -import java.nio.ByteBuffer; -import java.util.HashMap; -import org.apache.hadoop.hbase.filter.*; /** * ParseConstants holds a bunch of constants related to parsing Filter Strings diff --git a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java index cbfa48987c7..35b2c8b01cd 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java +++ b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java @@ -100,6 +100,7 @@ import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; import com.google.protobuf.Message; +import com.google.protobuf.RpcController; /** * This is a customized version of the polymorphic hadoop @@ -268,6 +269,8 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur GENERIC_ARRAY_CODE = code++; addToMap(Array.class, GENERIC_ARRAY_CODE); + addToMap(RpcController.class, code++); + // make sure that this is the last statement in this static block NEXT_CLASS_CODE = code; } @@ -357,7 +360,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur } } - static Integer getClassCode(final Class c) + public static Integer getClassCode(final Class c) throws IOException { Integer code = CLASS_TO_CODE.get(c); if (code == null ) { @@ -726,7 +729,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur * @return the instantiated Message instance * @throws IOException if an IO problem occurs */ - private static Message tryInstantiateProtobuf( + public static Message tryInstantiateProtobuf( Class protoClass, DataInput dataIn) throws IOException { diff --git a/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java index d13539360df..5189c8ca4c1 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java +++ b/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java @@ -109,6 +109,14 @@ public class TimeRange implements Writable { return maxStamp; } + /** + * Check if it is for all time + * @return true if it is for all time + */ + public boolean isAllTime() { + return allTime; + } + /** * Check if the specified timestamp is within this TimeRange. *

diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java b/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java index 05ae7174ae9..d71e97ed84d 100644 --- a/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java +++ b/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java @@ -19,18 +19,23 @@ */ package org.apache.hadoop.hbase.ipc; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.client.coprocessor.Exec; import org.apache.hadoop.hbase.client.coprocessor.ExecResult; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; import org.apache.hadoop.hbase.util.Bytes; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; - /** * Backs a {@link CoprocessorProtocol} subclass proxy and forwards method * invocations for server execution. Note that internally this will issue a @@ -74,8 +79,13 @@ public class ExecRPCInvoker implements InvocationHandler { ServerCallable callable = new ServerCallable(connection, table, row) { public ExecResult call() throws Exception { - return server.execCoprocessor(location.getRegionInfo().getRegionName(), - exec); + byte[] regionName = location.getRegionInfo().getRegionName(); + ExecCoprocessorRequest request = + RequestConverter.buildExecCoprocessorRequest(regionName, exec); + ExecCoprocessorResponse response = + server.execCoprocessor(null, request); + Object value = ProtobufUtil.toObject(response.getValue()); + return new ExecResult(regionName, value); } }; ExecResult result = callable.withRetries(); diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java b/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java index f1f06b0be61..b7afa58db0c 100644 --- a/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java +++ b/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java @@ -19,18 +19,22 @@ */ package org.apache.hadoop.hbase.ipc; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; -import org.apache.hadoop.io.VersionMismatchException; -import org.apache.hadoop.io.VersionedWritable; - import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.io.HbaseObjectWritable; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; +import org.apache.hadoop.io.VersionMismatchException; +import org.apache.hadoop.io.VersionedWritable; /** A method invocation, including the method name and its parameters.*/ 
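ExecRPCInvoker above now marshals the Exec into an ExecCoprocessorRequest and rebuilds the ExecResult on the client, since the protobuf response carries only the serialized return value. A sketch of that round-trip using the same request and response types:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.coprocessor.Exec;
    import org.apache.hadoop.hbase.client.coprocessor.ExecResult;
    import org.apache.hadoop.hbase.protobuf.ClientProtocol;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.RequestConverter;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse;

    import com.google.protobuf.ServiceException;

    public class ExecSketch {
      static ExecResult exec(ClientProtocol client, byte[] regionName, Exec exec)
          throws IOException {
        ExecCoprocessorRequest request =
            RequestConverter.buildExecCoprocessorRequest(regionName, exec);
        try {
          ExecCoprocessorResponse response = client.execCoprocessor(null, request);
          // getValue() holds the serialized return value; toObject, presumably
          // via HbaseObjectWritable, turns it back into the method's result.
          Object value = ProtobufUtil.toObject(response.getValue());
          return new ExecResult(regionName, value);
        } catch (ServiceException se) {
          throw ProtobufUtil.getRemoteException(se);
        }
      }
    }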
@InterfaceAudience.Private @@ -43,6 +47,17 @@ public class Invocation extends VersionedWritable implements Configurable { private long clientVersion; private int clientMethodsHash; + + // For generated protocol classes which don't have VERSION field, + // such as protobuf interfaces. + private static final Map, Long> + PROTOCOL_VERSION = new HashMap, Long>(); + + static { + PROTOCOL_VERSION.put(ClientService.BlockingInterface.class, + Long.valueOf(ClientProtocol.VERSION)); + } + private static byte RPC_VERSION = 1; public Invocation() {} @@ -51,22 +66,28 @@ public class Invocation extends VersionedWritable implements Configurable { this.methodName = method.getName(); this.parameterClasses = method.getParameterTypes(); this.parameters = parameters; - if (method.getDeclaringClass().equals(VersionedProtocol.class)) { + Class declaringClass = method.getDeclaringClass(); + if (declaringClass.equals(VersionedProtocol.class)) { //VersionedProtocol is exempted from version check. clientVersion = 0; clientMethodsHash = 0; } else { try { - Field versionField = method.getDeclaringClass().getField("VERSION"); - versionField.setAccessible(true); - this.clientVersion = versionField.getLong(method.getDeclaringClass()); + Long version = PROTOCOL_VERSION.get(declaringClass); + if (version != null) { + this.clientVersion = version.longValue(); + } else { + Field versionField = declaringClass.getField("VERSION"); + versionField.setAccessible(true); + this.clientVersion = versionField.getLong(declaringClass); + } } catch (NoSuchFieldException ex) { - throw new RuntimeException("The " + method.getDeclaringClass(), ex); + throw new RuntimeException("The " + declaringClass, ex); } catch (IllegalAccessException ex) { throw new RuntimeException(ex); } - this.clientMethodsHash = ProtocolSignature.getFingerprint(method - .getDeclaringClass().getMethods()); + this.clientMethodsHash = ProtocolSignature.getFingerprint( + declaringClass.getMethods()); } } diff --git a/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java b/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java index 0573c684668..9f159f26be1 100644 --- a/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java +++ b/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.io.HbaseObjectWritable; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Objects; @@ -52,6 +53,8 @@ import org.apache.hadoop.conf.*; import org.codehaus.jackson.map.ObjectMapper; +import com.google.protobuf.ServiceException; + /** An RpcEngine implementation for Writable data. 
*/ @InterfaceAudience.Private class WritableRpcEngine implements RpcEngine { @@ -407,6 +410,9 @@ class WritableRpcEngine implements RpcEngine { if (target instanceof IOException) { throw (IOException)target; } + if (target instanceof ServiceException) { + throw ProtobufUtil.getRemoteException((ServiceException)target); + } IOException ioe = new IOException(target.toString()); ioe.setStackTrace(target.getStackTrace()); throw ioe; diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index b71ae667232..d0570b98e14 100644 --- a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -68,10 +68,13 @@ import org.apache.hadoop.hbase.io.Reference.Range; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; -import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; @@ -486,7 +489,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool { LOG.debug("Going to connect to server " + location + " for row " + Bytes.toStringBinary(row)); byte[] regionName = location.getRegionInfo().getRegionName(); - return server.bulkLoadHFiles(famPaths, regionName); + BulkLoadHFileRequest request = + RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName); + BulkLoadHFileResponse response = + server.bulkLoadHFile(null, request); + return response.getLoaded(); } }; diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java b/src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java new file mode 100644 index 00000000000..422e8658774 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
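LoadIncrementalHFiles above gets the same conversion: the family-to-HFile pairs travel in a BulkLoadHFileRequest, and the old boolean return becomes the response's loaded flag. A sketch, assuming famPaths is the List<Pair<byte[], String>> of family and path that the tool already assembles:

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.hbase.protobuf.ClientProtocol;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.RequestConverter;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
    import org.apache.hadoop.hbase.util.Pair;

    import com.google.protobuf.ServiceException;

    public class BulkLoadSketch {
      /** Each pair names a column family and the HFile path to load into it. */
      static boolean bulkLoad(ClientProtocol client, byte[] regionName,
          List<Pair<byte[], String>> famPaths) throws IOException {
        BulkLoadHFileRequest request =
            RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName);
        try {
          BulkLoadHFileResponse response = client.bulkLoadHFile(null, request);
          return response.getLoaded(); // did the region accept all files?
        } catch (ServiceException se) {
          throw ProtobufUtil.getRemoteException(se);
        }
      }
    }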
+ */ + +package org.apache.hadoop.hbase.protobuf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService; +import org.apache.hadoop.hbase.security.TokenInfo; +import org.apache.hadoop.security.KerberosInfo; + +/** + * Protocol that a HBase client uses to communicate with a region server. + */ +@KerberosInfo( + serverPrincipal = "hbase.regionserver.kerberos.principal") +@TokenInfo("HBASE_AUTH_TOKEN") +@InterfaceAudience.Private +public interface AdminProtocol extends + AdminService.BlockingInterface, VersionedProtocol { + public static final long VERSION = 1L; +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java b/src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java new file mode 100644 index 00000000000..3d6a23aeeb4 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.protobuf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; +import org.apache.hadoop.hbase.security.TokenInfo; +import org.apache.hadoop.security.KerberosInfo; + +/** + * Protocol that a HBase client uses to communicate with a region server. 
+ */ +@KerberosInfo( + serverPrincipal = "hbase.regionserver.kerberos.principal") +@TokenInfo("HBASE_AUTH_TOKEN") +@InterfaceAudience.Public +@InterfaceStability.Evolving +public interface ClientProtocol extends + ClientService.BlockingInterface, VersionedProtocol { + public static final long VERSION = 1L; +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 2eb57de81b9..b0568302bd0 100644 --- a/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -17,12 +17,89 @@ */ package org.apache.hadoop.hbase.protobuf; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.RowLock; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.io.HbaseObjectWritable; +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import com.google.protobuf.ByteString; +import com.google.protobuf.ServiceException; + /** * Protobufs utility. 
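 * <p>Callers of the generated blocking stubs are expected to unwrap the * ServiceException those stubs throw; a sketch, assuming <code>stub</code> and * <code>request</code> are in hand: * <pre> * try { *   stub.mutate(null, request); * } catch (ServiceException se) { *   throw ProtobufUtil.getRemoteException(se); * } * </pre>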
*/ -public class ProtobufUtil { +public final class ProtobufUtil { + + private ProtobufUtil() { + } + + /** + * Primitive type to class mapping. + */ + private final static Map<String, Class<?>> + PRIMITIVES = new HashMap<String, Class<?>>(); + + static { + PRIMITIVES.put(Boolean.TYPE.getName(), Boolean.TYPE); + PRIMITIVES.put(Byte.TYPE.getName(), Byte.TYPE); + PRIMITIVES.put(Character.TYPE.getName(), Character.TYPE); + PRIMITIVES.put(Short.TYPE.getName(), Short.TYPE); + PRIMITIVES.put(Integer.TYPE.getName(), Integer.TYPE); + PRIMITIVES.put(Long.TYPE.getName(), Long.TYPE); + PRIMITIVES.put(Float.TYPE.getName(), Float.TYPE); + PRIMITIVES.put(Double.TYPE.getName(), Double.TYPE); + PRIMITIVES.put(Void.TYPE.getName(), Void.TYPE); + } + /** * Magic we put ahead of a serialized protobuf message. * For example, all znode content is protobuf messages with the below magic @@ -56,4 +133,592 @@ public class ProtobufUtil { public static int lengthOfPBMagic() { return PB_MAGIC.length; } + + /** + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. + * + * @param se ServiceException that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + public static IOException getRemoteException(ServiceException se) { + Throwable e = se.getCause(); + if (e == null) { + return new IOException(se); + } + return e instanceof IOException ? (IOException) e : new IOException(se); + } + + /** + * Convert a protocol buffer Exec to a client Exec + * + * @param proto the protocol buffer Exec to convert + * @return the converted client Exec + */ + @SuppressWarnings("unchecked") + public static Exec toExec( + final ClientProtos.Exec proto) throws IOException { + byte[] row = proto.getRow().toByteArray(); + String protocolName = proto.getProtocolName(); + String methodName = proto.getMethodName(); + List<Object> parameters = new ArrayList<Object>(); + Class<? extends CoprocessorProtocol> protocol = null; + Method method = null; + try { + List<Class<?>> types = new ArrayList<Class<?>>(); + for (NameBytesPair parameter: proto.getParameterList()) { + String type = parameter.getName(); + Class<?> declaredClass = PRIMITIVES.get(type); + if (declaredClass == null) { + declaredClass = Class.forName(parameter.getName()); + } + parameters.add(toObject(parameter)); + types.add(declaredClass); + } + Class<?> [] parameterTypes = new Class<?> [types.size()]; + types.toArray(parameterTypes); + protocol = (Class<? extends CoprocessorProtocol>) + Class.forName(protocolName); + method = protocol.getMethod(methodName, parameterTypes); + } catch (NoSuchMethodException nsme) { + throw new IOException(nsme); + } catch (ClassNotFoundException cnfe) { + throw new IOException(cnfe); + } + Configuration conf = HBaseConfiguration.create(); + for (NameStringPair p: proto.getPropertyList()) { + conf.set(p.getName(), p.getValue()); + } + Object[] parameterObjects = new Object[parameters.size()]; + parameters.toArray(parameterObjects); + return new Exec(conf, row, protocol, + method, parameterObjects); + } + + /** + * Convert a ServerName to a protocol buffer ServerName + * + * @param serverName the ServerName to convert + * @return the converted protocol buffer ServerName + */ + public static HBaseProtos.ServerName + toServerName(final ServerName serverName) { + if (serverName == null) return null; + HBaseProtos.ServerName.Builder builder = + HBaseProtos.ServerName.newBuilder(); + builder.setHostName(serverName.getHostname()); + if (serverName.getPort() >= 0) { + builder.setPort(serverName.getPort()); + } + if (serverName.getStartcode() >= 
0) { + builder.setStartCode(serverName.getStartcode()); + } + return builder.build(); + } + + /** + * Convert a RegionInfo to a HRegionInfo + * + * @param proto the RegionInfo to convert + * @return the converted HRegionInfo + */ + public static HRegionInfo + toRegionInfo(final RegionInfo proto) { + if (proto == null) return null; + byte[] tableName = proto.getTableName().toByteArray(); + long regionId = proto.getRegionId(); + byte[] startKey = null; + byte[] endKey = null; + if (proto.hasStartKey()) { + startKey = proto.getStartKey().toByteArray(); + } + if (proto.hasEndKey()) { + endKey = proto.getEndKey().toByteArray(); + } + + return new HRegionInfo(tableName, + startKey, endKey, false, regionId); + } + + /** + * Convert a HRegionInfo to a RegionInfo + * + * @param info the HRegionInfo to convert + * @return the converted RegionInfo + */ + public static RegionInfo + toRegionInfo(final HRegionInfo info) { + if (info == null) return null; + RegionInfo.Builder builder = RegionInfo.newBuilder(); + builder.setTableName(ByteString.copyFrom(info.getTableName())); + builder.setRegionId(info.getRegionId()); + if (info.getStartKey() != null) { + builder.setStartKey(ByteString.copyFrom(info.getStartKey())); + } + if (info.getEndKey() != null) { + builder.setEndKey(ByteString.copyFrom(info.getEndKey())); + } + return builder.build(); + } + + /** + * Convert a protocol buffer Get to a client Get + * + * @param get the protocol buffer Get to convert + * @return the converted client Get + * @throws IOException + */ + public static Get toGet( + final ClientProtos.Get proto) throws IOException { + if (proto == null) return null; + byte[] row = proto.getRow().toByteArray(); + RowLock rowLock = null; + if (proto.hasLockId()) { + rowLock = new RowLock(proto.getLockId()); + } + Get get = new Get(row, rowLock); + if (proto.hasCacheBlocks()) { + get.setCacheBlocks(proto.getCacheBlocks()); + } + if (proto.hasMaxVersions()) { + get.setMaxVersions(proto.getMaxVersions()); + } + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + get.setTimeRange(minStamp, maxStamp); + } + if (proto.hasFilter()) { + NameBytesPair filter = proto.getFilter(); + get.setFilter((Filter)toObject(filter)); + } + for (NameBytesPair attribute: proto.getAttributeList()) { + get.setAttribute(attribute.getName(), attribute.getValue().toByteArray()); + } + if (proto.getColumnCount() > 0) { + for (Column column: proto.getColumnList()) { + byte[] family = column.getFamily().toByteArray(); + if (column.getQualifierCount() > 0) { + for (ByteString qualifier: column.getQualifierList()) { + get.addColumn(family, qualifier.toByteArray()); + } + } else { + get.addFamily(family); + } + } + } + return get; + } + + /** + * Convert a protocol buffer Mutate to a Put + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Put + * @throws DoNotRetryIOException + */ + public static Put toPut( + final Mutate proto) throws DoNotRetryIOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.PUT : type.name(); + byte[] row = proto.getRow().toByteArray(); + long timestamp = HConstants.LATEST_TIMESTAMP; + if (proto.hasTimestamp()) { + timestamp = proto.getTimestamp(); + } + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + Put put = new 
Put(row, timestamp, lock); + put.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + put.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifier value"); + } + byte[] value = qv.getValue().toByteArray(); + long ts = timestamp; + if (qv.hasTimestamp()) { + ts = qv.getTimestamp(); + } + put.add(family, qualifier, ts, value); + } + } + return put; + } + + /** + * Convert a protocol buffer Mutate to a Delete + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Delete + */ + public static Delete toDelete(final Mutate proto) { + MutateType type = proto.getMutateType(); + assert type == MutateType.DELETE : type.name(); + byte[] row = proto.getRow().toByteArray(); + long timestamp = HConstants.LATEST_TIMESTAMP; + if (proto.hasTimestamp()) { + timestamp = proto.getTimestamp(); + } + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + Delete delete = new Delete(row, timestamp, lock); + delete.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + delete.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + DeleteType deleteType = qv.getDeleteType(); + byte[] qualifier = null; + if (qv.hasQualifier()) { + qualifier = qv.getQualifier().toByteArray(); + } + long ts = HConstants.LATEST_TIMESTAMP; + if (qv.hasTimestamp()) { + ts = qv.getTimestamp(); + } + if (deleteType == DeleteType.DELETE_ONE_VERSION) { + delete.deleteColumn(family, qualifier, ts); + } else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) { + delete.deleteColumns(family, qualifier, ts); + } else { + delete.deleteFamily(family, ts); + } + } + } + return delete; + } + + /** + * Convert a protocol buffer Mutate to an Append + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Append + * @throws DoNotRetryIOException + */ + public static Append toAppend( + final Mutate proto) throws DoNotRetryIOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.APPEND : type.name(); + byte[] row = proto.getRow().toByteArray(); + Append append = new Append(row); + append.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + append.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifier value"); + } + byte[] value = qv.getValue().toByteArray(); + append.add(family, qualifier, value); + } + } + return append; + } + + /** + * Convert a protocol buffer Mutate to an Increment + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Increment + * @throws IOException + */ + public 
static Increment toIncrement( + final Mutate proto) throws IOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.INCREMENT : type.name(); + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + byte[] row = proto.getRow().toByteArray(); + Increment increment = new Increment(row, lock); + increment.setWriteToWAL(proto.getWriteToWAL()); + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + increment.setTimeRange(minStamp, maxStamp); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifier value"); + } + long value = Bytes.toLong(qv.getValue().toByteArray()); + increment.addColumn(family, qualifier, value); + } + } + return increment; + } + + /** + * Convert a protocol buffer Scan to a client Scan + * + * @param proto the protocol buffer Scan to convert + * @return the converted client Scan + * @throws IOException + */ + public static Scan toScan( + final ClientProtos.Scan proto) throws IOException { + byte [] startRow = HConstants.EMPTY_START_ROW; + byte [] stopRow = HConstants.EMPTY_END_ROW; + if (proto.hasStartRow()) { + startRow = proto.getStartRow().toByteArray(); + } + if (proto.hasStopRow()) { + stopRow = proto.getStopRow().toByteArray(); + } + Scan scan = new Scan(startRow, stopRow); + if (proto.hasCacheBlocks()) { + scan.setCacheBlocks(proto.getCacheBlocks()); + } + if (proto.hasMaxVersions()) { + scan.setMaxVersions(proto.getMaxVersions()); + } + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + scan.setTimeRange(minStamp, maxStamp); + } + if (proto.hasFilter()) { + NameBytesPair filter = proto.getFilter(); + scan.setFilter((Filter)toObject(filter)); + } + if (proto.hasBatchSize()) { + scan.setBatch(proto.getBatchSize()); + } + for (NameBytesPair attribute: proto.getAttributeList()) { + scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray()); + } + if (proto.getColumnCount() > 0) { + for (Column column: proto.getColumnList()) { + byte[] family = column.getFamily().toByteArray(); + if (column.getQualifierCount() > 0) { + for (ByteString qualifier: column.getQualifierList()) { + scan.addColumn(family, qualifier.toByteArray()); + } + } else { + scan.addFamily(family); + } + } + } + return scan; + } + + /** + * Convert a client Result to a protocol buffer Result + * + * @param result the client Result to convert + * @return the converted protocol buffer Result + */ + public static ClientProtos.Result toResult(final Result result) { + ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder(); + List<ByteString> protos = new ArrayList<ByteString>(); + List<KeyValue> keyValues = result.list(); + if (keyValues != null) { + for (KeyValue keyValue: keyValues) { + ByteString value = ByteString.copyFrom(keyValue.getBuffer(), + keyValue.getOffset(), keyValue.getLength()); + protos.add(value); + } + } + 
builder.addAllKeyValueBytes(protos); + return builder.build(); + } + + /** + * Convert a protocol buffer Result to a client Result + * + * @param proto the protocol buffer Result to convert + * @return the converted client Result + */ + public static Result toResult(final ClientProtos.Result proto) { + List<ByteString> values = proto.getKeyValueBytesList(); + List<KeyValue> keyValues = new ArrayList<KeyValue>(values.size()); + for (ByteString value: values) { + keyValues.add(new KeyValue(value.toByteArray())); + } + return new Result(keyValues); + } + + /** + * Get the HLog entries from a list of protocol buffer WALEntry + * + * @param protoList the list of protocol buffer WALEntry + * @return an array of HLog entries + */ + public static HLog.Entry[] + toHLogEntries(final List<WALEntry> protoList) { + List<HLog.Entry> entries = new ArrayList<HLog.Entry>(); + for (WALEntry entry: protoList) { + WALKey walKey = entry.getWalKey(); + java.util.UUID clusterId = HConstants.DEFAULT_CLUSTER_ID; + if (walKey.hasClusterId()) { + UUID protoUuid = walKey.getClusterId(); + clusterId = new java.util.UUID( + protoUuid.getMostSigBits(), protoUuid.getLeastSigBits()); + } + HLogKey key = new HLogKey(walKey.getEncodedRegionName().toByteArray(), + walKey.getTableName().toByteArray(), walKey.getLogSequenceNumber(), + walKey.getWriteTime(), clusterId); + WALEntry.WALEdit walEdit = entry.getEdit(); + WALEdit edit = new WALEdit(); + for (ByteString keyValue: walEdit.getKeyValueList()) { + edit.add(new KeyValue(keyValue.toByteArray())); + } + if (walEdit.getFamilyScopeCount() > 0) { + TreeMap<byte[], Integer> scopes = + new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR); + for (FamilyScope scope: walEdit.getFamilyScopeList()) { + scopes.put(scope.getFamily().toByteArray(), + Integer.valueOf(scope.getScopeType().ordinal())); + } + edit.setScopes(scopes); + } + entries.add(new HLog.Entry(key, edit)); + } + return entries.toArray(new HLog.Entry[entries.size()]); + } + + /** + * Convert a protocol buffer Parameter to a Java object + * + * @param parameter the protocol buffer Parameter to convert + * @return the converted Java object + * @throws IOException if failed to deserialize the parameter + */ + public static Object toObject( + final NameBytesPair parameter) throws IOException { + if (parameter == null || !parameter.hasValue()) return null; + byte[] bytes = parameter.getValue().toByteArray(); + ByteArrayInputStream bais = null; + try { + bais = new ByteArrayInputStream(bytes); + DataInput in = new DataInputStream(bais); + return HbaseObjectWritable.readObject(in, null); + } finally { + if (bais != null) { + bais.close(); + } + } + } + + /** + * Convert a stringified protocol buffer exception Parameter to a Java Exception + * + * @param parameter the protocol buffer Parameter to convert + * @return the converted Exception + * @throws IOException if failed to deserialize the parameter + */ + @SuppressWarnings("unchecked") + public static Throwable toException( + final NameBytesPair parameter) throws IOException { + if (parameter == null || !parameter.hasValue()) return null; + String desc = parameter.getValue().toStringUtf8(); + String type = parameter.getName(); + try { + Class<? extends Throwable> c = + (Class<? extends Throwable>)Class.forName(type); + Constructor<? extends Throwable> cn = + c.getDeclaredConstructor(String.class); + return cn.newInstance(desc); + } catch (Exception e) { + throw new IOException(e); + } + } + + /** + * Serialize a Java Object into a Parameter. 
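The resulting + * NameBytesPair is the generic envelope used on the wire for filters, + * comparators, coprocessor parameters and batched multi actions. 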
The Java Object should be a + * Writable or protocol buffer Message + * + * @param value the Writable/Message object to be serialized + * @return the converted protocol buffer Parameter + * @throws IOException if failed to serialize the object + */ + public static NameBytesPair toParameter( + final Object value) throws IOException { + Class<?> declaredClass = Object.class; + if (value != null) { + declaredClass = value.getClass(); + } + return toParameter(declaredClass, value); + } + + /** + * Serialize a Java Object into a Parameter. The Java Object should be a + * Writable or protocol buffer Message + * + * @param declaredClass the declared class of the parameter + * @param value the Writable/Message object to be serialized + * @return the converted protocol buffer Parameter + * @throws IOException if failed to serialize the object + */ + public static NameBytesPair toParameter( + final Class<?> declaredClass, final Object value) throws IOException { + NameBytesPair.Builder builder = NameBytesPair.newBuilder(); + builder.setName(declaredClass.getName()); + if (value != null) { + ByteArrayOutputStream baos = null; + try { + baos = new ByteArrayOutputStream(); + DataOutput out = new DataOutputStream(baos); + Class<?> clz = declaredClass; + if (HbaseObjectWritable.getClassCode(declaredClass) == null) { + clz = value.getClass(); + } + HbaseObjectWritable.writeObject(out, value, clz, null); + builder.setValue( + ByteString.copyFrom(baos.toByteArray())); + } finally { + if (baos != null) { + baos.close(); + } + } + } + return builder.build(); + } } \ No newline at end of file diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java new file mode 100644 index 00000000000..a912cc33dcf --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -0,0 +1,782 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.protobuf; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NavigableMap; +import java.util.NavigableSet; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Action; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; +import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Message; + +/** + * Helper utility to build protocol buffer requests, + * or build components for protocol buffer requests. + */ +@InterfaceAudience.Private +public final class RequestConverter { + + private RequestConverter() { + } + +// Start utilities for Client + +/** + * Create a new protocol buffer GetRequest to get a row, all columns in a family. + * If there is no such row, return the closest row before it. 
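+ * <p>A sketch of the closest-row-before lookup this request drives (hypothetical; + * assumes a connected <code>client</code> stub and a catalog-style use): + * <pre> + * GetRequest request = RequestConverter.buildGetRequest( + *   regionName, row, HConstants.CATALOG_FAMILY, true); + * GetResponse response = client.get(null, request); + * </pre>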
+ * + * @param regionName the name of the region to get + * @param row the row to get + * @param family the column family to get + * @param closestRowBefore if the requested row doesn't exist, + * should return the immediate row before + * @return a protocol buffer GetRequest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final byte[] row, final byte[] family, boolean closestRowBefore) { + GetRequest.Builder builder = GetRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setClosestRowBefore(closestRowBefore); + builder.setRegion(region); + + Column.Builder columnBuilder = Column.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(family)); + ClientProtos.Get.Builder getBuilder = + ClientProtos.Get.newBuilder(); + getBuilder.setRow(ByteString.copyFrom(row)); + getBuilder.addColumn(columnBuilder.build()); + builder.setGet(getBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer GetRequest for a client Get + * + * @param regionName the name of the region to get + * @param get the client Get + * @return a protocol buffer GetRequest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final Get get) throws IOException { + return buildGetRequest(regionName, get, false); + } + + /** + * Create a protocol buffer GetRequest for a client Get + * + * @param regionName the name of the region to get + * @param get the client Get + * @param existenceOnly whether to check only for the existence of the row + * @return a protocol buffer GetRequest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final Get get, final boolean existenceOnly) throws IOException { + GetRequest.Builder builder = GetRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setExistenceOnly(existenceOnly); + builder.setRegion(region); + builder.setGet(buildGet(get)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a client increment + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param amount + * @param writeToWAL + * @return a mutate request + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final long amount, final boolean writeToWAL) { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + + Mutate.Builder mutateBuilder = Mutate.newBuilder(); + mutateBuilder.setRow(ByteString.copyFrom(row)); + mutateBuilder.setMutateType(MutateType.INCREMENT); + mutateBuilder.setWriteToWAL(writeToWAL); + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(family)); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + valueBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(amount))); + valueBuilder.setQualifier(ByteString.copyFrom(qualifier)); + columnBuilder.addQualifierValue(valueBuilder.build()); + mutateBuilder.addColumnValue(columnBuilder.build()); + + builder.setMutate(mutateBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a conditioned put + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + * 
@param put + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final WritableByteArrayComparable comparator, + final CompareType compareType, final Put put) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + Condition condition = buildCondition( + row, family, qualifier, comparator, compareType); + builder.setMutate(buildMutate(MutateType.PUT, put)); + builder.setCondition(condition); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a conditioned delete + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + * @param delete + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final WritableByteArrayComparable comparator, + final CompareType compareType, final Delete delete) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + Condition condition = buildCondition( + row, family, qualifier, comparator, compareType); + builder.setMutate(buildMutate(MutateType.DELETE, delete)); + builder.setCondition(condition); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a put + * + * @param regionName + * @param put + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Put put) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.PUT, put)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for an append + * + * @param regionName + * @param append + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Append append) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.APPEND, append)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a client increment + * + * @param regionName + * @param increment + * @return a mutate request + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Increment increment) { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(increment)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a delete + * + * @param regionName + * @param delete + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, 
final Delete delete) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.DELETE, delete)); + return builder.build(); + } + + /** + * Create a protocol buffer MultiRequest for row mutations + * + * @param regionName + * @param rowMutations + * @return a multi request + * @throws IOException + */ + public static MultiRequest buildMultiRequest(final byte[] regionName, + final RowMutations rowMutations) throws IOException { + MultiRequest.Builder builder = MultiRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setAtomic(true); + for (Mutation mutation: rowMutations.getMutations()) { + MutateType mutateType = null; + if (mutation instanceof Put) { + mutateType = MutateType.PUT; + } else if (mutation instanceof Delete) { + mutateType = MutateType.DELETE; + } else { + throw new DoNotRetryIOException( + "RowMutations supports only put and delete, not " + + mutation.getClass().getName()); + } + Mutate mutate = buildMutate(mutateType, mutation); + builder.addAction(ProtobufUtil.toParameter(mutate)); + } + return builder.build(); + } + + /** + * Create a protocol buffer ScanRequest for a client Scan + * + * @param regionName + * @param scan + * @param numberOfRows + * @param closeScanner + * @return a scan request + * @throws IOException + */ + public static ScanRequest buildScanRequest(final byte[] regionName, + final Scan scan, final int numberOfRows, + final boolean closeScanner) throws IOException { + ScanRequest.Builder builder = ScanRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setNumberOfRows(numberOfRows); + builder.setCloseScanner(closeScanner); + builder.setRegion(region); + + ClientProtos.Scan.Builder scanBuilder = + ClientProtos.Scan.newBuilder(); + scanBuilder.setCacheBlocks(scan.getCacheBlocks()); + scanBuilder.setBatchSize(scan.getBatch()); + scanBuilder.setMaxVersions(scan.getMaxVersions()); + TimeRange timeRange = scan.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + scanBuilder.setTimeRange(timeRangeBuilder.build()); + } + Map<String, byte[]> attributes = scan.getAttributesMap(); + if (!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + scanBuilder.addAttribute(attributeBuilder.build()); + } + } + byte[] startRow = scan.getStartRow(); + if (startRow != null && startRow.length > 0) { + scanBuilder.setStartRow(ByteString.copyFrom(startRow)); + } + byte[] stopRow = scan.getStopRow(); + if (stopRow != null && stopRow.length > 0) { + scanBuilder.setStopRow(ByteString.copyFrom(stopRow)); + } + if (scan.hasFilter()) { + scanBuilder.setFilter(ProtobufUtil.toParameter(scan.getFilter())); + } + Column.Builder columnBuilder = Column.newBuilder(); + for (Map.Entry<byte[], NavigableSet<byte[]>> + family: scan.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + NavigableSet<byte[]> columns = family.getValue(); + 
columnBuilder.clearQualifier(); + if (columns != null && columns.size() > 0) { + for (byte [] qualifier: family.getValue()) { + if (qualifier != null) { + columnBuilder.addQualifier(ByteString.copyFrom(qualifier)); + } + } + } + scanBuilder.addColumn(columnBuilder.build()); + } + builder.setScan(scanBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer ScanRequest for a scanner id + * + * @param scannerId + * @param numberOfRows + * @param closeScanner + * @return a scan request + */ + public static ScanRequest buildScanRequest(final long scannerId, + final int numberOfRows, final boolean closeScanner) { + ScanRequest.Builder builder = ScanRequest.newBuilder(); + builder.setNumberOfRows(numberOfRows); + builder.setCloseScanner(closeScanner); + builder.setScannerId(scannerId); + return builder.build(); + } + + /** + * Create a protocol buffer LockRowRequest + * + * @param regionName + * @param row + * @return a lock row request + */ + public static LockRowRequest buildLockRowRequest( + final byte[] regionName, final byte[] row) { + LockRowRequest.Builder builder = LockRowRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.addRow(ByteString.copyFrom(row)); + return builder.build(); + } + + /** + * Create a protocol buffer UnlockRowRequest + * + * @param regionName + * @param lockId + * @return an unlock row request + */ + public static UnlockRowRequest buildUnlockRowRequest( + final byte[] regionName, final long lockId) { + UnlockRowRequest.Builder builder = UnlockRowRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setLockId(lockId); + return builder.build(); + } + + /** + * Create a protocol buffer bulk load request + * + * @param familyPaths + * @param regionName + * @return a bulk load request + */ + public static BulkLoadHFileRequest buildBulkLoadHFileRequest( + final List<Pair<byte[], String>> familyPaths, final byte[] regionName) { + BulkLoadHFileRequest.Builder builder = BulkLoadHFileRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + FamilyPath.Builder familyPathBuilder = FamilyPath.newBuilder(); + for (Pair<byte[], String> familyPath: familyPaths) { + familyPathBuilder.setFamily(ByteString.copyFrom(familyPath.getFirst())); + familyPathBuilder.setPath(familyPath.getSecond()); + builder.addFamilyPath(familyPathBuilder.build()); + } + return builder.build(); + } + + /** + * Create a protocol buffer coprocessor exec request + * + * @param regionName + * @param exec + * @return a coprocessor exec request + * @throws IOException + */ + public static ExecCoprocessorRequest buildExecCoprocessorRequest( + final byte[] regionName, final Exec exec) throws IOException { + ExecCoprocessorRequest.Builder builder = ExecCoprocessorRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setCall(buildExec(exec)); + return builder.build(); + } + + /** + * Create a protocol buffer multi request for a list of actions. + * RowMutations in the list (if any) will be ignored. 
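+ * <p>A sketch (hypothetical; <code>actions</code> as assembled by the client-side + * batch machinery, <code>client</code> a connected stub): + * <pre> + * MultiRequest request = RequestConverter.buildMultiRequest(regionName, actions); + * MultiResponse response = client.multi(null, request); + * List&lt;Object&gt; results = ResponseConverter.getResults(response); + * </pre>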
+ * + * @param regionName + * @param actions + * @return a multi request + * @throws IOException + */ + public static <R> MultiRequest buildMultiRequest(final byte[] regionName, + final List<Action<R>> actions) throws IOException { + MultiRequest.Builder builder = MultiRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + for (Action<R> action: actions) { + Message protoAction = null; + Row row = action.getAction(); + if (row instanceof Get) { + protoAction = buildGet((Get)row); + } else if (row instanceof Put) { + protoAction = buildMutate(MutateType.PUT, (Put)row); + } else if (row instanceof Delete) { + protoAction = buildMutate(MutateType.DELETE, (Delete)row); + } else if (row instanceof Exec) { + protoAction = buildExec((Exec)row); + } else if (row instanceof Append) { + protoAction = buildMutate(MutateType.APPEND, (Append)row); + } else if (row instanceof Increment) { + protoAction = buildMutate((Increment)row); + } else if (row instanceof RowMutations) { + continue; // ignore RowMutations + } else { + throw new DoNotRetryIOException( + "multi doesn't support " + row.getClass().getName()); + } + builder.addAction(ProtobufUtil.toParameter(protoAction)); + } + return builder.build(); + } + +// End utilities for Client + + /** + * Create a protocol buffer Condition + * + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + * @return a Condition + * @throws IOException + */ + private static Condition buildCondition(final byte[] row, + final byte[] family, final byte [] qualifier, + final WritableByteArrayComparable comparator, + final CompareType compareType) throws IOException { + Condition.Builder builder = Condition.newBuilder(); + builder.setRow(ByteString.copyFrom(row)); + builder.setFamily(ByteString.copyFrom(family)); + builder.setQualifier(ByteString.copyFrom(qualifier)); + builder.setComparator(ProtobufUtil.toParameter(comparator)); + builder.setCompareType(compareType); + return builder.build(); + } + + /** + * Create a new protocol buffer Exec based on a client Exec + * + * @param exec + * @return + * @throws IOException + */ + private static ClientProtos.Exec buildExec( + final Exec exec) throws IOException { + ClientProtos.Exec.Builder + builder = ClientProtos.Exec.newBuilder(); + Configuration conf = exec.getConf(); + if (conf != null) { + NameStringPair.Builder propertyBuilder = NameStringPair.newBuilder(); + Iterator<Entry<String, String>> iterator = conf.iterator(); + while (iterator.hasNext()) { + Entry<String, String> entry = iterator.next(); + propertyBuilder.setName(entry.getKey()); + propertyBuilder.setValue(entry.getValue()); + builder.addProperty(propertyBuilder.build()); + } + } + builder.setProtocolName(exec.getProtocolName()); + builder.setMethodName(exec.getMethodName()); + builder.setRow(ByteString.copyFrom(exec.getRow())); + Object[] parameters = exec.getParameters(); + if (parameters != null && parameters.length > 0) { + Class<?>[] declaredClasses = exec.getParameterClasses(); + for (int i = 0, n = parameters.length; i < n; i++) { + builder.addParameter( + ProtobufUtil.toParameter(declaredClasses[i], parameters[i])); + } + } + return builder.build(); + } + + /** + * Create a protocol buffer Get based on a client Get. 
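+ * Carries the row, cache-blocks flag, max versions, optional lock id, filter, + * time range, attributes and the family/qualifier map of the client Get.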
+ * + * @param get the client Get + * @return a protocol buffer Get + * @throws IOException + */ + private static ClientProtos.Get buildGet( + final Get get) throws IOException { + ClientProtos.Get.Builder builder = + ClientProtos.Get.newBuilder(); + builder.setRow(ByteString.copyFrom(get.getRow())); + builder.setCacheBlocks(get.getCacheBlocks()); + builder.setMaxVersions(get.getMaxVersions()); + if (get.getLockId() >= 0) { + builder.setLockId(get.getLockId()); + } + if (get.getFilter() != null) { + builder.setFilter(ProtobufUtil.toParameter(get.getFilter())); + } + TimeRange timeRange = get.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + builder.setTimeRange(timeRangeBuilder.build()); + } + Map<String, byte[]> attributes = get.getAttributesMap(); + if (!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + builder.addAttribute(attributeBuilder.build()); + } + } + if (get.hasFamilies()) { + Column.Builder columnBuilder = Column.newBuilder(); + Map<byte[], NavigableSet<byte[]>> families = get.getFamilyMap(); + for (Map.Entry<byte[], NavigableSet<byte[]>> family: families.entrySet()) { + NavigableSet<byte[]> qualifiers = family.getValue(); + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifier(); + if (qualifiers != null && qualifiers.size() > 0) { + for (byte[] qualifier: qualifiers) { + if (qualifier != null) { + columnBuilder.addQualifier(ByteString.copyFrom(qualifier)); + } + } + } + builder.addColumn(columnBuilder.build()); + } + } + return builder.build(); + } + + private static Mutate buildMutate(final Increment increment) { + Mutate.Builder builder = Mutate.newBuilder(); + builder.setRow(ByteString.copyFrom(increment.getRow())); + builder.setMutateType(MutateType.INCREMENT); + builder.setWriteToWAL(increment.getWriteToWAL()); + if (increment.getLockId() >= 0) { + builder.setLockId(increment.getLockId()); + } + TimeRange timeRange = increment.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + builder.setTimeRange(timeRangeBuilder.build()); + } + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + for (Map.Entry<byte[], NavigableMap<byte[], Long>> + family: increment.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifierValue(); + NavigableMap<byte[], Long> values = family.getValue(); + if (values != null && values.size() > 0) { + for (Map.Entry<byte[], Long> value: values.entrySet()) { + valueBuilder.setQualifier(ByteString.copyFrom(value.getKey())); + valueBuilder.setValue(ByteString.copyFrom( + Bytes.toBytes(value.getValue().longValue()))); + columnBuilder.addQualifierValue(valueBuilder.build()); + } + } + builder.addColumnValue(columnBuilder.build()); + } + return builder.build(); + } + + /** + * Create a protocol buffer Mutate based on a client Mutation + * + * @param mutateType + * @param mutation + * @return a mutate + * @throws IOException + */ + private static Mutate buildMutate(final MutateType mutateType, + final Mutation mutation) throws IOException { + 
Mutate.Builder mutateBuilder = Mutate.newBuilder(); + mutateBuilder.setRow(ByteString.copyFrom(mutation.getRow())); + mutateBuilder.setMutateType(mutateType); + mutateBuilder.setWriteToWAL(mutation.getWriteToWAL()); + if (mutation.getLockId() >= 0) { + mutateBuilder.setLockId(mutation.getLockId()); + } + mutateBuilder.setTimestamp(mutation.getTimeStamp()); + Map<String, byte[]> attributes = mutation.getAttributesMap(); + if (!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + mutateBuilder.addAttribute(attributeBuilder.build()); + } + } + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + for (Map.Entry<byte[], List<KeyValue>> + family: mutation.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifierValue(); + for (KeyValue value: family.getValue()) { + valueBuilder.setQualifier(ByteString.copyFrom(value.getQualifier())); + valueBuilder.setValue(ByteString.copyFrom(value.getValue())); + valueBuilder.setTimestamp(value.getTimestamp()); + if (mutateType == MutateType.DELETE) { + KeyValue.Type keyValueType = KeyValue.Type.codeToType(value.getType()); + valueBuilder.setDeleteType(toDeleteType(keyValueType)); + } + columnBuilder.addQualifierValue(valueBuilder.build()); + } + mutateBuilder.addColumnValue(columnBuilder.build()); + } + return mutateBuilder.build(); + } + + /** + * Convert a byte array to a protocol buffer RegionSpecifier + * + * @param type the region specifier type + * @param value the region specifier byte array value + * @return a protocol buffer RegionSpecifier + */ + private static RegionSpecifier buildRegionSpecifier( + final RegionSpecifierType type, final byte[] value) { + RegionSpecifier.Builder regionBuilder = RegionSpecifier.newBuilder(); + regionBuilder.setValue(ByteString.copyFrom(value)); + regionBuilder.setType(type); + return regionBuilder.build(); + } + + /** + * Convert a delete KeyValue type to protocol buffer DeleteType. + * + * @param type + * @return + * @throws IOException + */ + private static DeleteType toDeleteType( + KeyValue.Type type) throws IOException { + switch (type) { + case Delete: + return DeleteType.DELETE_ONE_VERSION; + case DeleteColumn: + return DeleteType.DELETE_MULTIPLE_VERSIONS; + case DeleteFamily: + return DeleteType.DELETE_FAMILY; + default: + throw new IOException("Unknown delete type: " + type); + } + } +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java new file mode 100644 index 00000000000..ecaf9febb8d --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -0,0 +1,187 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.protobuf; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.regionserver.RegionOpeningState; +import org.apache.hadoop.util.StringUtils; + +import com.google.protobuf.ByteString; + +/** + * Helper utility to build protocol buffer responses, + * or retrieve data from protocol buffer responses. + */ +@InterfaceAudience.Private +public final class ResponseConverter { + + private ResponseConverter() { + } + +// Start utilities for Client + + /** + * Get the client Results from a protocol buffer ScanResponse + * + * @param response the protocol buffer ScanResponse + * @return the client Results in the response + */ + public static Result[] getResults(final ScanResponse response) { + if (response == null) return null; + int count = response.getResultCount(); + Result[] results = new Result[count]; + for (int i = 0; i < count; i++) { + results[i] = ProtobufUtil.toResult(response.getResult(i)); + } + return results; + } + + /** + * Get the results from a protocol buffer MultiResponse + * + * @param proto the protocol buffer MultiResponse to convert + * @return the results in the MultiResponse + * @throws IOException + */ + public static List<Object> getResults( + final ClientProtos.MultiResponse proto) throws IOException { + List<Object> results = new ArrayList<Object>(); + List<ActionResult> resultList = proto.getResultList(); + for (int i = 0, n = resultList.size(); i < n; i++) { + ActionResult result = resultList.get(i); + if (result.hasException()) { + results.add(ProtobufUtil.toException(result.getException())); + } else if (result.hasValue()) { + Object value = ProtobufUtil.toObject(result.getValue()); + if (value instanceof ClientProtos.Result) { + results.add(ProtobufUtil.toResult((ClientProtos.Result)value)); + } else { + results.add(value); + } + } else { + results.add(new Result()); + } + } + return results; + } + + /** + * Wrap a throwable to an action result. 
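+ * <p>A server-side sketch (hypothetical; <code>doAction</code> and + * <code>builder</code>, a MultiResponse builder, are assumed): + * <pre> + * try { + *   builder.addResult(doAction(action)); + * } catch (IOException ie) { + *   builder.addResult(ResponseConverter.buildActionResult(ie)); + * } + * </pre>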
+ * + * @param t + * @return an action result + */ + public static ActionResult buildActionResult(final Throwable t) { + ActionResult.Builder builder = ActionResult.newBuilder(); + NameBytesPair.Builder parameterBuilder = NameBytesPair.newBuilder(); + parameterBuilder.setName(t.getClass().getName()); + parameterBuilder.setValue( + ByteString.copyFromUtf8(StringUtils.stringifyException(t))); + builder.setException(parameterBuilder.build()); + return builder.build(); + } + +// End utilities for Client +// Start utilities for Admin + + /** + * Get the list of regions to flush from a RollWALWriterResponse + * + * @param proto the RollWALWriterResponse + * @return the list of regions to flush + */ + public static byte[][] getRegions(final RollWALWriterResponse proto) { + if (proto == null || proto.getRegionToFlushCount() == 0) return null; + List<byte[]> regions = new ArrayList<byte[]>(); + for (ByteString region: proto.getRegionToFlushList()) { + regions.add(region.toByteArray()); + } + return regions.toArray(new byte[regions.size()][]); + } + + /** + * Get the list of region info from a GetOnlineRegionResponse + * + * @param proto the GetOnlineRegionResponse + * @return the list of region info + */ + public static List<HRegionInfo> getRegionInfos + (final GetOnlineRegionResponse proto) { + if (proto == null || proto.getRegionInfoCount() == 0) return null; + List<HRegionInfo> regionInfos = new ArrayList<HRegionInfo>(); + for (RegionInfo regionInfo: proto.getRegionInfoList()) { + regionInfos.add(ProtobufUtil.toRegionInfo(regionInfo)); + } + return regionInfos; + } + + /** + * Get the region info from a GetRegionInfoResponse + * + * @param proto the GetRegionInfoResponse + * @return the region info + */ + public static HRegionInfo getRegionInfo + (final GetRegionInfoResponse proto) { + if (proto == null || proto.getRegionInfo() == null) return null; + return ProtobufUtil.toRegionInfo(proto.getRegionInfo()); + } + + /** + * Get the region opening state from an OpenRegionResponse + * + * @param proto the OpenRegionResponse + * @return the region opening state + */ + public static RegionOpeningState getRegionOpeningState + (final OpenRegionResponse proto) { + if (proto == null || proto.getOpeningStateCount() != 1) return null; + return RegionOpeningState.valueOf( + proto.getOpeningState(0).name()); + } + + /** + * Check if the region is closed from a CloseRegionResponse + * + * @param proto the CloseRegionResponse + * @return the region close state + */ + public static boolean isClosed + (final CloseRegionResponse proto) { + if (proto == null || !proto.hasClosed()) return false; + return proto.getClosed(); + } + +// End utilities for Admin +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java similarity index 74% rename from src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java rename to src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java index 216931094ff..e78e56de166 100644 --- a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -1,10 +1,10 @@ // Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: RegionAdmin.proto +// source: Admin.proto package org.apache.hadoop.hbase.protobuf.generated; -public final class RegionAdminProtos { - private RegionAdminProtos() {} +public final class AdminProtos { + private AdminProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } @@ -36,12 +36,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; } private int bitField0_; @@ -114,10 +114,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -142,41 +142,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -185,7 +185,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -196,12 +196,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -211,7 +211,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -224,18 +224,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -270,24 +270,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -295,8 +295,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -313,16 +313,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -509,12 +509,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; } private int bitField0_; @@ -587,10 +587,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; boolean result = true; result = result && (hasRegionInfo() == other.hasRegionInfo()); @@ -615,41 +615,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -658,7 +658,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -669,12 +669,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -684,7 +684,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -697,18 +697,18 @@ public final class RegionAdminProtos { } public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -743,24 +743,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -768,8 +768,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse(this); + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -786,16 +786,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; if (other.hasRegionInfo()) { mergeRegionInfo(other.getRegionInfo()); } @@ -987,12 +987,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; } private int bitField0_; @@ -1092,10 +1092,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -1126,41 +1126,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1169,7 +1169,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1180,12 +1180,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { @@ -1195,7 +1195,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1208,18 +1208,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1256,24 +1256,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -1281,8 +1281,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -1304,16 +1304,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -1566,12 +1566,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; } // repeated string storeFile = 1; @@ -1641,10 +1641,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) obj; boolean result = true; result = result && getStoreFileList() @@ -1666,41 +1666,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1709,7 +1709,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1720,12 +1720,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1735,7 +1735,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1748,18 +1748,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1789,24 +1789,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -1814,8 +1814,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( @@ -1828,16 +1828,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance()) return this; if (!other.storeFile_.isEmpty()) { if (storeFile_.isEmpty()) { storeFile_ = other.storeFile_; @@ -1980,12 +1980,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; } private void initFields() { @@ -2028,10 +2028,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj; boolean result = true; result = result && @@ -2047,41 +2047,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2090,7 +2090,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2101,12 +2101,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2116,7 +2116,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2129,18 +2129,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } @@ -2168,24 +2168,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -2193,23 +2193,23 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -2290,12 +2290,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; } // repeated .RegionInfo regionInfo = 1; @@ -2373,10 +2373,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj; boolean result = true; result = result && getRegionInfoList() @@ -2398,41 +2398,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2441,7 +2441,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2452,12 +2452,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2467,7 +2467,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2480,18 +2480,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2526,24 +2526,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -2551,8 +2551,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this); int from_bitField0_ = bitField0_; if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -2568,16 +2568,16 @@ public final class RegionAdminProtos { } 
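// NOTE (illustrative sketch, not part of the generated file): the hunks in
// this file only rename the enclosing outer class, RegionAdminProtos ->
// AdminProtos; the generated builder/parse API itself is unchanged. A
// minimal round trip through the surface shown here (newBuilder()/build()/
// parseFrom(byte[])/getRegionInfoList()) might look like the following;
// toByteArray() is the standard com.google.protobuf.Message serializer and
// is assumed, as it does not appear in this hunk:
//
//   AdminProtos.GetOnlineRegionResponse empty =
//       AdminProtos.GetOnlineRegionResponse.newBuilder().build();
//   byte[] wire = empty.toByteArray();
//   AdminProtos.GetOnlineRegionResponse back =
//       AdminProtos.GetOnlineRegionResponse.parseFrom(wire);
//   assert back.getRegionInfoList().isEmpty(); // repeated .RegionInfo field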
public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; if (regionInfoBuilder_ == null) { if (!other.regionInfo_.isEmpty()) { if (regionInfo_.isEmpty()) { @@ -2887,12 +2887,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; } private int bitField0_; @@ -2989,10 +2989,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; boolean result = true; result = result && getRegionList() @@ -3023,41 +3023,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -3066,7 +3066,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3077,12 +3077,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3092,7 +3092,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return 
newBuilder(this); } @@ -3105,18 +3105,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3153,24 +3153,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3178,8 +3178,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest(this); + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (regionBuilder_ == null) {
@@ -3201,16 +3201,16 @@ public final class RegionAdminProtos {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)other);
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance()) return this;
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this;
if (regionBuilder_ == null) {
if (!other.region_.isEmpty()) {
if (region_.isEmpty()) {
@@ -3516,9 +3516,9 @@ public final class RegionAdminProtos {
extends com.google.protobuf.MessageOrBuilder {
// repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList();
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList();
int getOpeningStateCount();
- org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index);
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index);
}
public static final class OpenRegionResponse extends com.google.protobuf.GeneratedMessage
@@ -3540,12 +3540,12 @@ public final class RegionAdminProtos {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable;
}
public enum RegionOpeningState
@@ -3593,7 +3593,7 @@ public final class RegionAdminProtos {
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0);
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0);
}
private static final RegionOpeningState[] VALUES = {
@@ -3622,14 +3622,14 @@ public final class RegionAdminProtos {
// repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
public static final int OPENINGSTATE_FIELD_NUMBER = 1;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> openingState_;
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
return openingState_;
}
public int getOpeningStateCount() {
return openingState_.size();
}
- public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
return openingState_.get(index);
}
@@ -3686,10 +3686,10 @@ public final class RegionAdminProtos {
if (obj == this) {
return true;
}
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)) {
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) {
return super.equals(obj);
}
- org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) obj;
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj;
boolean result = true;
result = result && getOpeningStateList()
@@ -3711,41 +3711,41 @@ public final class RegionAdminProtos {
return hash;
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(byte[] data)
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite
extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -3754,7 +3754,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3765,12 +3765,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3780,7 +3780,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -3793,18 +3793,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3834,24 +3834,24 @@ public final class 
RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3859,8 +3859,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { openingState_ = java.util.Collections.unmodifiableList(openingState_); @@ -3872,16 +3872,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()) return this; + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this;
if (!other.openingState_.isEmpty()) {
if (openingState_.isEmpty()) {
openingState_ = other.openingState_;
@@ -3925,7 +3925,7 @@ public final class RegionAdminProtos {
}
case 8: {
int rawValue = input.readEnum();
- org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
@@ -3938,7 +3938,7 @@ public final class RegionAdminProtos {
int oldLimit = input.pushLimit(length);
while(input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
- org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
@@ -3955,25 +3955,25 @@ public final class RegionAdminProtos {
private int bitField0_;
// repeated .OpenRegionResponse.RegionOpeningState openingState = 1;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ =
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ =
java.util.Collections.emptyList();
private void ensureOpeningStateIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_);
+ openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_);
bitField0_ |= 0x00000001;
}
}
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() {
return java.util.Collections.unmodifiableList(openingState_);
}
public int getOpeningStateCount() {
return openingState_.size();
}
- public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
+ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) {
return openingState_.get(index);
}
public Builder setOpeningState(
- int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) {
+ int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
if (value == null) {
throw new NullPointerException();
}
@@ -3982,7 +3982,7 @@ public final class RegionAdminProtos {
onChanged();
return this;
}
- public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) {
+ public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) {
if (value == null) {
throw new NullPointerException();
}
@@ -3992,7 +3992,7 @@ public final class RegionAdminProtos {
return this;
}
public Builder addAllOpeningState(
- java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState> values) {
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> values) {
ensureOpeningStateIsMutable();
super.addAll(values, openingState_);
onChanged();
@@ -4052,12 +4052,12 @@ public final class RegionAdminProtos {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor;
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable;
+ return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable;
}
private int bitField0_;
@@ -4166,10 +4166,10 @@ public final class RegionAdminProtos {
if (obj == this) {
return true;
}
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)) {
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) {
return super.equals(obj);
}
- org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) obj;
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj;
boolean result = true;
result = result && (hasRegion() == other.hasRegion());
@@ -4212,41 +4212,41 @@ public final class RegionAdminProtos {
return hash;
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(byte[] data)
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
- public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input)
throws
java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -4255,7 +4255,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4266,12 +4266,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4281,7 +4281,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -4294,18 +4294,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -4344,24 +4344,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -4369,8 +4369,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -4395,16 +4395,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -4648,12 +4648,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; } private int bitField0_; @@ -4719,10 +4719,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; boolean result = true; result = result && (hasClosed() == other.hasClosed()); @@ -4747,41 +4747,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -4790,7 +4790,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4801,12 +4801,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4816,7 +4816,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -4829,18 +4829,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { public static final 
com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor;
       }

       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable;
       }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -4870,24 +4870,24 @@ public final class RegionAdminProtos {

       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor();
       }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
       }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed()
           throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(
             result).asInvalidProtocolBufferException();
@@ -4895,8 +4895,8 @@ public final class RegionAdminProtos {
         return result;
       }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -4909,16 +4909,16 @@ public final class RegionAdminProtos {
       }

       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this;
         if (other.hasClosed()) {
           setClosed(other.getClosed());
         }
@@ -5032,12 +5032,12 @@ public final class RegionAdminProtos {

     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor;
     }

     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
     }

     private int bitField0_;
@@ -5128,10 +5128,10 @@ public final class RegionAdminProtos {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) {
         return super.equals(obj);
       }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj;

       boolean result = true;
       result = result && (hasRegion() == other.hasRegion());
@@ -5165,41 +5165,41 @@ public final class RegionAdminProtos {
       return hash;
     }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return newBuilder().mergeFrom(data).buildParsed();
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return newBuilder().mergeFrom(data, extensionRegistry)
                .buildParsed();
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -5208,7 +5208,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -5219,12 +5219,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -5234,7 +5234,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -5247,18 +5247,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequestOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -5295,24 +5295,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -5320,8 +5320,8 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -5342,16 +5342,16 @@ public final class RegionAdminProtos {
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
@@ -5570,12 +5570,12 @@ public final class RegionAdminProtos {

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
    }

    private int bitField0_;
@@ -5659,10 +5659,10 @@ public final class RegionAdminProtos {
      if (obj == this) {
       return true;
      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) {
        return super.equals(obj);
      }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj;

      boolean result = true;
      result = result && (hasLastFlushTime() == other.hasLastFlushTime());
@@ -5696,41 +5696,41 @@ public final class RegionAdminProtos {
      return hash;
    }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -5739,7 +5739,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -5750,12 +5750,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -5765,7 +5765,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -5778,18 +5778,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponseOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -5821,24 +5821,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -5846,8 +5846,8 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -5864,16 +5864,16 @@ public final class RegionAdminProtos {
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this;
        if (other.hasLastFlushTime()) {
          setLastFlushTime(other.getLastFlushTime());
        }
@@ -6016,12 +6016,12 @@ public final class RegionAdminProtos {

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
    }

    private int bitField0_;
@@ -6112,10 +6112,10 @@ public final class RegionAdminProtos {
      if (obj == this) {
       return true;
      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) {
        return super.equals(obj);
      }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
@@ -6149,41 +6149,41 @@ public final class RegionAdminProtos {
      return hash;
    }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -6192,7 +6192,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -6203,12 +6203,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -6218,7 +6218,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -6231,18 +6231,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequestOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -6279,24 +6279,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -6304,8 +6304,8 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -6326,16 +6326,16 @@ public final class RegionAdminProtos {
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
@@ -6549,12 +6549,12 @@ public final class RegionAdminProtos {

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
    }

    private void initFields() {
@@ -6597,10 +6597,10 @@ public final class RegionAdminProtos {
      if (obj == this) {
       return true;
      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) {
        return super.equals(obj);
      }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj;

      boolean result = true;
      result = result &&
@@ -6616,41 +6616,41 @@ public final class RegionAdminProtos {
      return hash;
    }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -6659,7 +6659,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -6670,12 +6670,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -6685,7 +6685,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -6698,18 +6698,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponseOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -6737,24 +6737,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -6762,23 +6762,23 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
@@ -6858,12 +6858,12 @@ public final class RegionAdminProtos {

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
    }

    private int bitField0_;
@@ -6954,10 +6954,10 @@ public final class RegionAdminProtos {
      if (obj == this) {
       return true;
      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) {
        return super.equals(obj);
      }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
@@ -6991,41 +6991,41 @@ public final class RegionAdminProtos {
      return hash;
    }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -7034,7 +7034,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -7045,12 +7045,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -7060,7 +7060,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -7073,18 +7073,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequestOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -7121,24 +7121,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -7146,8 +7146,8 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -7168,16 +7168,16 @@ public final class RegionAdminProtos {
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
@@ -7388,12 +7388,12 @@ public final class RegionAdminProtos {

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
    }

    private void initFields() {
@@ -7436,10 +7436,10 @@ public final class RegionAdminProtos {
      if (obj == this) {
       return true;
      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) {
        return super.equals(obj);
      }
-      org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj;

      boolean result = true;
      result = result &&
@@ -7455,41 +7455,41 @@ public final class RegionAdminProtos {
      return hash;
    }

-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
@@ -7498,7 +7498,7 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -7509,12 +7509,12 @@ public final class RegionAdminProtos {
        return null;
      }
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -7524,7 +7524,7 @@ public final class RegionAdminProtos {

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
@@ -7537,18 +7537,18 @@ public final class RegionAdminProtos {
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder
-       implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponseOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable;
      }

-      // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
@@ -7576,24 +7576,24 @@ public final class RegionAdminProtos {

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse build() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

-      private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildParsed()
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial();
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
@@ -7601,23 +7601,23 @@ public final class RegionAdminProtos {
        return result;
      }

-      public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()) return
this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -7696,12 +7696,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; } private int bitField0_; @@ -7789,10 +7789,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; boolean result = true; result = result && (hasLeastSigBits() == other.hasLeastSigBits()); @@ -7826,41 +7826,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -7869,7 +7869,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7880,12 +7880,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7895,7 +7895,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -7908,18 +7908,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -7951,24 +7951,24 @@ public final class RegionAdminProtos { 
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -7976,8 +7976,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -7994,16 +7994,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; if (other.hasLeastSigBits()) { setLeastSigBits(other.getLeastSigBits()); } @@ -8123,13 +8123,13 @@ public final class RegionAdminProtos { // required .WALEntry.WALKey walKey = 1; boolean hasWalKey(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey(); - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder(); // required .WALEntry.WALEdit edit = 2; boolean hasEdit(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); } public static final class WALEntry extends com.google.protobuf.GeneratedMessage @@ -8151,12 +8151,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; } public interface WALKeyOrBuilder @@ -8180,8 +8180,8 @@ public final class RegionAdminProtos { // optional .UUID clusterId = 5; boolean hasClusterId(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); } public static final class WALKey extends com.google.protobuf.GeneratedMessage @@ -8203,12 +8203,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; } private int bitField0_; @@ -8254,14 +8254,14 @@ public final class RegionAdminProtos { // optional .UUID clusterId = 5; public static final int CLUSTERID_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { return clusterId_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { return clusterId_; } @@ -8270,7 +8270,7 @@ public final class RegionAdminProtos { tableName_ = com.google.protobuf.ByteString.EMPTY; logSequenceNumber_ = 0L; writeTime_ = 0L; - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -8367,10 +8367,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; boolean result = true; result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); @@ -8431,41 +8431,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -8474,7 +8474,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -8485,12 +8485,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -8500,7 +8500,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -8513,18 +8513,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -8553,7 +8553,7 @@ public final class RegionAdminProtos { writeTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if 
(clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); } else { clusterIdBuilder_.clear(); } @@ -8567,24 +8567,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -8592,8 +8592,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -8626,16 +8626,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } @@ -8725,7 +8725,7 @@ public final class RegionAdminProtos { break; } case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); if (hasClusterId()) { subBuilder.mergeFrom(getClusterId()); } @@ -8830,20 +8830,20 @@ public final class RegionAdminProtos { } // optional .UUID clusterId = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> clusterIdBuilder_; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { if (clusterIdBuilder_ == null) { return clusterId_; } else { return clusterIdBuilder_.getMessage(); } } - public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { + public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { if (clusterIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -8857,7 +8857,7 @@ public final class RegionAdminProtos { return this; } public Builder setClusterId( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { if (clusterIdBuilder_ == null) { clusterId_ = builderForValue.build(); onChanged(); @@ -8867,12 +8867,12 @@ public final class RegionAdminProtos { bitField0_ |= 0x00000010; return this; } - public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && - clusterId_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) { + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) { clusterId_ = - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); } else { clusterId_ = value; } @@ -8885,7 +8885,7 @@ public final class RegionAdminProtos { } public Builder clearClusterId() { if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); onChanged(); } else { clusterIdBuilder_.clear(); @@ -8893,12 +8893,12 @@ public final class RegionAdminProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder getClusterIdBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; onChanged(); return getClusterIdFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); } else { @@ -8906,11 +8906,11 @@ public final class RegionAdminProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> getClusterIdFieldBuilder() { if (clusterIdBuilder_ == null) { clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>( clusterId_, getParentForChildren(), isClean()); @@ -8933,24 +8933,19 @@ public final class RegionAdminProtos { public interface WALEditOrBuilder extends com.google.protobuf.MessageOrBuilder { - // repeated .KeyValue keyValue = 1; - java.util.List - getKeyValueList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index); + // repeated bytes keyValue = 1; + java.util.List getKeyValueList(); int getKeyValueCount(); - java.util.List - getKeyValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index); + com.google.protobuf.ByteString getKeyValue(int index); // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - java.util.List + java.util.List getFamilyScopeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); int getFamilyScopeCount(); - java.util.List + java.util.List 
getFamilyScopeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index); } public static final class WALEdit extends @@ -8973,12 +8968,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; } public enum ScopeType @@ -9023,7 +9018,7 @@ public final class RegionAdminProtos { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); } private static final ScopeType[] VALUES = { @@ -9059,7 +9054,7 @@ public final class RegionAdminProtos { // required .WALEntry.WALEdit.ScopeType scopeType = 2; boolean hasScopeType(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); } public static final class FamilyScope extends com.google.protobuf.GeneratedMessage @@ -9081,12 +9076,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; } private int bitField0_; @@ -9102,17 +9097,17 @@ public final class RegionAdminProtos { // required .WALEntry.WALEdit.ScopeType scopeType = 2; public static final int SCOPETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { return scopeType_; } private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; - scopeType_ = 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -9174,10 +9169,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); @@ -9211,41 +9206,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -9254,7 +9249,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9265,12 +9260,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9280,7 +9275,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -9293,18 +9288,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -9325,7 +9320,7 @@ public final class RegionAdminProtos { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -9336,24 +9331,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -9361,8 +9356,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -9379,16 +9374,16 @@ public final class RegionAdminProtos { } public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } @@ -9441,7 +9436,7 @@ public final class RegionAdminProtos { } case 16: { int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { @@ -9481,14 +9476,14 @@ public final class RegionAdminProtos { } // required .WALEntry.WALEdit.ScopeType scopeType = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { return scopeType_; } - public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value) { + public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) { if (value == null) { throw new NullPointerException(); } @@ -9499,7 +9494,7 @@ public final class RegionAdminProtos { } public Builder clearScopeType() { bitField0_ = (bitField0_ & ~0x00000002); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; onChanged(); return this; } @@ -9515,50 +9510,43 @@ public final class RegionAdminProtos { // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) } - // repeated .KeyValue keyValue = 1; + // repeated bytes keyValue = 1; public static final int 
KEYVALUE_FIELD_NUMBER = 1; - private java.util.List keyValue_; - public java.util.List getKeyValueList() { - return keyValue_; - } - public java.util.List - getKeyValueOrBuilderList() { + private java.util.List keyValue_; + public java.util.List + getKeyValueList() { return keyValue_; } public int getKeyValueCount() { return keyValue_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { - return keyValue_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index) { + public com.google.protobuf.ByteString getKeyValue(int index) { return keyValue_.get(index); } // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; public static final int FAMILYSCOPE_FIELD_NUMBER = 2; - private java.util.List familyScope_; - public java.util.List getFamilyScopeList() { + private java.util.List familyScope_; + public java.util.List getFamilyScopeList() { return familyScope_; } - public java.util.List + public java.util.List getFamilyScopeOrBuilderList() { return familyScope_; } public int getFamilyScopeCount() { return familyScope_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { return familyScope_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index) { return familyScope_.get(index); } private void initFields() { - keyValue_ = java.util.Collections.emptyList(); + keyValue_ = java.util.Collections.emptyList();; familyScope_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; @@ -9566,12 +9554,6 @@ public final class RegionAdminProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getKeyValueCount(); i++) { - if (!getKeyValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } for (int i = 0; i < getFamilyScopeCount(); i++) { if (!getFamilyScope(i).isInitialized()) { memoizedIsInitialized = 0; @@ -9586,7 +9568,7 @@ public final class RegionAdminProtos { throws java.io.IOException { getSerializedSize(); for (int i = 0; i < keyValue_.size(); i++) { - output.writeMessage(1, keyValue_.get(i)); + output.writeBytes(1, keyValue_.get(i)); } for (int i = 0; i < familyScope_.size(); i++) { output.writeMessage(2, familyScope_.get(i)); @@ -9600,9 +9582,14 @@ public final class RegionAdminProtos { if (size != -1) return size; size = 0; - for (int i = 0; i < keyValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, keyValue_.get(i)); + { + int dataSize = 0; + for (int i = 0; i < keyValue_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValue_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueList().size(); } for (int i = 0; i < familyScope_.size(); i++) { size += com.google.protobuf.CodedOutputStream @@ -9625,10 +9612,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)) { + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; boolean result = true; result = result && getKeyValueList() @@ -9656,41 +9643,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -9699,7 +9686,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9710,12 +9697,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9725,7 +9712,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -9738,18 +9725,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -9760,7 +9747,6 @@ public final class RegionAdminProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getKeyValueFieldBuilder(); getFamilyScopeFieldBuilder(); } } @@ -9770,12 +9756,8 @@ public final class RegionAdminProtos { public Builder clear() { super.clear(); - if (keyValueBuilder_ == null) { - keyValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - keyValueBuilder_.clear(); - } + keyValue_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); if (familyScopeBuilder_ == null) { familyScope_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); @@ -9791,24 +9773,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -9816,18 +9798,14 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); int from_bitField0_ = bitField0_; - if (keyValueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - keyValue_ = java.util.Collections.unmodifiableList(keyValue_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.keyValue_ = keyValue_; - } else { - result.keyValue_ = keyValueBuilder_.build(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValue_ = java.util.Collections.unmodifiableList(keyValue_); + bitField0_ = (bitField0_ & ~0x00000001); } + result.keyValue_ = keyValue_; if (familyScopeBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002)) { familyScope_ = java.util.Collections.unmodifiableList(familyScope_); @@ -9842,41 +9820,25 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); } else { 
super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; - if (keyValueBuilder_ == null) { - if (!other.keyValue_.isEmpty()) { - if (keyValue_.isEmpty()) { - keyValue_ = other.keyValue_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureKeyValueIsMutable(); - keyValue_.addAll(other.keyValue_); - } - onChanged(); - } - } else { - if (!other.keyValue_.isEmpty()) { - if (keyValueBuilder_.isEmpty()) { - keyValueBuilder_.dispose(); - keyValueBuilder_ = null; - keyValue_ = other.keyValue_; - bitField0_ = (bitField0_ & ~0x00000001); - keyValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getKeyValueFieldBuilder() : null; - } else { - keyValueBuilder_.addAllMessages(other.keyValue_); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; + if (!other.keyValue_.isEmpty()) { + if (keyValue_.isEmpty()) { + keyValue_ = other.keyValue_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueIsMutable(); + keyValue_.addAll(other.keyValue_); } + onChanged(); } if (familyScopeBuilder_ == null) { if (!other.familyScope_.isEmpty()) { @@ -9909,12 +9871,6 @@ public final class RegionAdminProtos { } public final boolean isInitialized() { - for (int i = 0; i < getKeyValueCount(); i++) { - if (!getKeyValue(i).isInitialized()) { - - return false; - } - } for (int i = 0; i < getFamilyScopeCount(); i++) { if (!getFamilyScope(i).isInitialized()) { @@ -9948,13 +9904,12 @@ public final class RegionAdminProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addKeyValue(subBuilder.buildPartial()); + ensureKeyValueIsMutable(); + keyValue_.add(input.readBytes()); break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addFamilyScope(subBuilder.buildPartial()); break; @@ -9965,206 +9920,71 @@ public final class RegionAdminProtos { private int bitField0_; - // repeated .KeyValue keyValue = 1; - private java.util.List keyValue_ = - java.util.Collections.emptyList(); + // repeated bytes keyValue = 1; + private java.util.List keyValue_ = java.util.Collections.emptyList();; private void ensureKeyValueIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - keyValue_ = new java.util.ArrayList(keyValue_); + keyValue_ = new java.util.ArrayList(keyValue_); bitField0_ |= 0x00000001; } } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> 
keyValueBuilder_; - - public java.util.List getKeyValueList() { - if (keyValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(keyValue_); - } else { - return keyValueBuilder_.getMessageList(); - } + public java.util.List + getKeyValueList() { + return java.util.Collections.unmodifiableList(keyValue_); } public int getKeyValueCount() { - if (keyValueBuilder_ == null) { - return keyValue_.size(); - } else { - return keyValueBuilder_.getCount(); - } + return keyValue_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { - if (keyValueBuilder_ == null) { - return keyValue_.get(index); - } else { - return keyValueBuilder_.getMessage(index); - } + public com.google.protobuf.ByteString getKeyValue(int index) { + return keyValue_.get(index); } public Builder setKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.set(index, value); - onChanged(); - } else { - keyValueBuilder_.setMessage(index, value); - } + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.set(index, value); + onChanged(); return this; } - public Builder setKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.set(index, builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addKeyValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.add(value); - onChanged(); - } else { - keyValueBuilder_.addMessage(value); - } - return this; - } - public Builder addKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.add(index, value); - onChanged(); - } else { - keyValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addKeyValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.add(builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.add(index, builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.addMessage(index, builderForValue.build()); - } + public Builder addKeyValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.add(value); + onChanged(); return this; } public Builder addAllKeyValue( - java.lang.Iterable values) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - super.addAll(values, keyValue_); - onChanged(); - } else { - keyValueBuilder_.addAllMessages(values); - 
} + java.lang.Iterable values) { + ensureKeyValueIsMutable(); + super.addAll(values, keyValue_); + onChanged(); return this; } public Builder clearKeyValue() { - if (keyValueBuilder_ == null) { - keyValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - keyValueBuilder_.clear(); - } + keyValue_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); return this; } - public Builder removeKeyValue(int index) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.remove(index); - onChanged(); - } else { - keyValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getKeyValueBuilder( - int index) { - return getKeyValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index) { - if (keyValueBuilder_ == null) { - return keyValue_.get(index); } else { - return keyValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getKeyValueOrBuilderList() { - if (keyValueBuilder_ != null) { - return keyValueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(keyValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder() { - return getKeyValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder( - int index) { - return getKeyValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public java.util.List - getKeyValueBuilderList() { - return getKeyValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> - getKeyValueFieldBuilder() { - if (keyValueBuilder_ == null) { - keyValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( - keyValue_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - keyValue_ = null; - } - return keyValueBuilder_; - } // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - private java.util.List familyScope_ = + private java.util.List familyScope_ = java.util.Collections.emptyList(); private void ensureFamilyScopeIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = new java.util.ArrayList(familyScope_); + familyScope_ = new java.util.ArrayList(familyScope_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; - public java.util.List getFamilyScopeList() { + public java.util.List getFamilyScopeList() { if (familyScopeBuilder_ == null) { return java.util.Collections.unmodifiableList(familyScope_); } else { @@ -10178,7 +9998,7 @@ public final class RegionAdminProtos { return familyScopeBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { if (familyScopeBuilder_ == null) { return familyScope_.get(index); } else { @@ -10186,7 +10006,7 @@ public final class RegionAdminProtos { } } public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10200,7 +10020,7 @@ public final class RegionAdminProtos { return this; } public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { ensureFamilyScopeIsMutable(); familyScope_.set(index, builderForValue.build()); @@ -10210,7 +10030,7 @@ public final class RegionAdminProtos { } return this; } - public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10224,7 +10044,7 @@ public final class RegionAdminProtos { return this; } public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { if (familyScopeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10238,7 +10058,7 @@ public final class RegionAdminProtos { return this; } public Builder addFamilyScope( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { ensureFamilyScopeIsMutable(); familyScope_.add(builderForValue.build()); @@ -10249,7 +10069,7 @@ public final class RegionAdminProtos { return this; } public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { if (familyScopeBuilder_ == null) { ensureFamilyScopeIsMutable(); familyScope_.add(index, 
builderForValue.build()); @@ -10260,7 +10080,7 @@ public final class RegionAdminProtos { return this; } public Builder addAllFamilyScope( - java.lang.Iterable values) { + java.lang.Iterable values) { if (familyScopeBuilder_ == null) { ensureFamilyScopeIsMutable(); super.addAll(values, familyScope_); @@ -10290,18 +10110,18 @@ public final class RegionAdminProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( int index) { return getFamilyScopeFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( int index) { if (familyScopeBuilder_ == null) { return familyScope_.get(index); } else { return familyScopeBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getFamilyScopeOrBuilderList() { if (familyScopeBuilder_ != null) { return familyScopeBuilder_.getMessageOrBuilderList(); @@ -10309,25 +10129,25 @@ public final class RegionAdminProtos { return java.util.Collections.unmodifiableList(familyScope_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { return getFamilyScopeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( int index) { return getFamilyScopeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); } - public java.util.List + public java.util.List getFamilyScopeBuilderList() { return getFamilyScopeFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> getFamilyScopeFieldBuilder() { if (familyScopeBuilder_ == null) { familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( familyScope_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), @@ -10351,33 +10171,33 @@ public final class RegionAdminProtos { private int bitField0_; // required .WALEntry.WALKey walKey = 1; public static final int WALKEY_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey walKey_; public boolean hasWalKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey() { return walKey_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { return walKey_; } // required .WALEntry.WALEdit edit = 2; public static final int EDIT_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; public boolean hasEdit() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { return edit_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { return edit_; } private void initFields() { - walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -10447,10 +10267,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; boolean result = true; result = result && (hasWalKey() == other.hasWalKey()); @@ -10484,41 +10304,41 @@ public final class 
RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -10527,7 +10347,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -10538,12 +10358,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -10553,7 +10373,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -10566,18 +10386,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -10599,13 +10419,13 @@ public final class RegionAdminProtos { public Builder clear() { super.clear(); if (walKeyBuilder_ == null) { - walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); } else { walKeyBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); } else { editBuilder_.clear(); } @@ -10619,24 +10439,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -10644,8 +10464,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -10670,16 +10490,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; if (other.hasWalKey()) { mergeWalKey(other.getWalKey()); } @@ -10734,7 +10554,7 @@ public final class RegionAdminProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); if (hasWalKey()) { subBuilder.mergeFrom(getWalKey()); } @@ -10743,7 +10563,7 @@ public final class RegionAdminProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); if (hasEdit()) { 
subBuilder.mergeFrom(getEdit()); } @@ -10758,20 +10578,20 @@ public final class RegionAdminProtos { private int bitField0_; // required .WALEntry.WALKey walKey = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> walKeyBuilder_; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> walKeyBuilder_; public boolean hasWalKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey() { if (walKeyBuilder_ == null) { return walKey_; } else { return walKeyBuilder_.getMessage(); } } - public Builder setWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { + public Builder setWalKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { if (walKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10785,7 +10605,7 @@ public final class RegionAdminProtos { return this; } public Builder setWalKey( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { if (walKeyBuilder_ == null) { walKey_ = builderForValue.build(); onChanged(); @@ -10795,12 +10615,12 @@ public final class RegionAdminProtos { bitField0_ |= 0x00000001; return this; } - public Builder mergeWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { + public Builder mergeWalKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { if (walKeyBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - walKey_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) { + walKey_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) { walKey_ = - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(walKey_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(walKey_).mergeFrom(value).buildPartial(); } else { walKey_ = value; } @@ -10813,7 +10633,7 @@ public final class RegionAdminProtos { } public Builder clearWalKey() { if (walKeyBuilder_ == null) { - walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); onChanged(); } else { walKeyBuilder_.clear(); @@ -10821,12 +10641,12 @@ public final class RegionAdminProtos 
{ bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder getWalKeyBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getWalKeyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getWalKeyFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { if (walKeyBuilder_ != null) { return walKeyBuilder_.getMessageOrBuilder(); } else { @@ -10834,11 +10654,11 @@ public final class RegionAdminProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> getWalKeyFieldBuilder() { if (walKeyBuilder_ == null) { walKeyBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>( walKey_, getParentForChildren(), isClean()); @@ -10848,20 +10668,20 @@ public final class RegionAdminProtos { } // required .WALEntry.WALEdit edit = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; public boolean hasEdit() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { if (editBuilder_ == null) { return edit_; } else { return editBuilder_.getMessage(); } } - public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { + public Builder 
setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { if (editBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10875,7 +10695,7 @@ public final class RegionAdminProtos { return this; } public Builder setEdit( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) { if (editBuilder_ == null) { edit_ = builderForValue.build(); onChanged(); @@ -10885,12 +10705,12 @@ public final class RegionAdminProtos { bitField0_ |= 0x00000002; return this; } - public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { + public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { if (editBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - edit_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) { + edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) { edit_ = - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); } else { edit_ = value; } @@ -10903,7 +10723,7 @@ public final class RegionAdminProtos { } public Builder clearEdit() { if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); onChanged(); } else { editBuilder_.clear(); @@ -10911,12 +10731,12 @@ public final class RegionAdminProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEditFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { if (editBuilder_ != null) { return editBuilder_.getMessageOrBuilder(); } else { @@ -10924,11 +10744,11 @@ public final class RegionAdminProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> getEditFieldBuilder() { if (editBuilder_ == null) { editBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>( edit_, getParentForChildren(), isClean()); @@ -10952,13 +10772,13 @@ public final class RegionAdminProtos { extends com.google.protobuf.MessageOrBuilder { // repeated .WALEntry walEntry = 1; - java.util.List + java.util.List getWalEntryList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index); int getWalEntryCount(); - java.util.List + java.util.List getWalEntryOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( int index); } public static final class ReplicateWALEntryRequest extends @@ -10981,31 +10801,31 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; } // repeated .WALEntry walEntry = 1; public static final int WALENTRY_FIELD_NUMBER = 1; - private java.util.List walEntry_; - public java.util.List getWalEntryList() { + private java.util.List walEntry_; + public java.util.List getWalEntryList() { return walEntry_; } - public java.util.List + public java.util.List getWalEntryOrBuilderList() { return walEntry_; } public int getWalEntryCount() { return walEntry_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index) { return walEntry_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( int index) { return walEntry_.get(index); } @@ -11064,10 +10884,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj; boolean result = true; result = result && getWalEntryList() @@ 
-11089,41 +10909,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -11132,7 +10952,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11143,12 +10963,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11158,7 +10978,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -11171,18 +10991,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -11217,24 +11037,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -11242,8 +11062,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this); int from_bitField0_ = bitField0_; if (walEntryBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -11259,16 +11079,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; if (walEntryBuilder_ == null) { if (!other.walEntry_.isEmpty()) { if (walEntry_.isEmpty()) { @@ -11333,7 +11153,7 @@ public final class RegionAdminProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addWalEntry(subBuilder.buildPartial()); break; @@ -11345,19 +11165,19 @@ public final class RegionAdminProtos { private int bitField0_; // repeated .WALEntry walEntry = 
1; - private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry> walEntry_ = + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> walEntry_ = java.util.Collections.emptyList(); private void ensureWalEntryIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - walEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry>(walEntry_); + walEntry_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry>(walEntry_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> walEntryBuilder_; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> walEntryBuilder_; - public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry> getWalEntryList() { + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> getWalEntryList() { if (walEntryBuilder_ == null) { return java.util.Collections.unmodifiableList(walEntry_); } else { @@ -11371,7 +11191,7 @@ public final class RegionAdminProtos { return walEntryBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index) { if (walEntryBuilder_ == null) { return walEntry_.get(index); } else { @@ -11379,7 +11199,7 @@ public final class RegionAdminProtos { } } public Builder setWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (walEntryBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -11393,7 +11213,7 @@ public final class RegionAdminProtos { return this; } public Builder setWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (walEntryBuilder_ == null) { ensureWalEntryIsMutable(); walEntry_.set(index, builderForValue.build()); @@ -11403,7 +11223,7 @@ public final class RegionAdminProtos { } return this; } - public Builder addWalEntry(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + public Builder addWalEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (walEntryBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -11417,7 +11237,7 @@ public final class RegionAdminProtos { return this; } public Builder addWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { if (walEntryBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -11431,7 +11251,7 @@ public final class RegionAdminProtos { return this; } public Builder addWalEntry( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (walEntryBuilder_ == null) { ensureWalEntryIsMutable(); walEntry_.add(builderForValue.build()); @@ -11442,7 +11262,7 @@ public final class RegionAdminProtos { return this; } public Builder addWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder
builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { if (walEntryBuilder_ == null) { ensureWalEntryIsMutable(); walEntry_.add(index, builderForValue.build()); @@ -11453,7 +11273,7 @@ public final class RegionAdminProtos { return this; } public Builder addAllWalEntry( - java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry> values) { + java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry> values) { if (walEntryBuilder_ == null) { ensureWalEntryIsMutable(); super.addAll(values, walEntry_); @@ -11483,18 +11303,18 @@ public final class RegionAdminProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder getWalEntryBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getWalEntryBuilder( int index) { return getWalEntryFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( int index) { if (walEntryBuilder_ == null) { return walEntry_.get(index); } else { return walEntryBuilder_.getMessageOrBuilder(index); } } - public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> getWalEntryOrBuilderList() { if (walEntryBuilder_ != null) { return walEntryBuilder_.getMessageOrBuilderList(); @@ -11502,25 +11322,25 @@ public final class RegionAdminProtos { return java.util.Collections.unmodifiableList(walEntry_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addWalEntryBuilder() { return getWalEntryFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addWalEntryBuilder( int index) { return getWalEntryFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); } - public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder> + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder> getWalEntryBuilderList() { return getWalEntryFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>( walEntry_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), @@ -11564,12 +11384,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; } private void initFields() { @@ -11612,10 +11432,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; boolean result = true; result = result && @@ -11631,41 +11451,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) + public static 
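For reviewers tracing the rename, a minimal usage sketch of the renamed generated API (illustrative only, not part of the patch): it exercises the newBuilder/parseFrom surface that the hunks above retarget from RegionAdminProtos to AdminProtos. The class name ReplicateWALEntryRequestRoundTrip is invented for the example.

import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;

public class ReplicateWALEntryRequestRoundTrip {
  public static void main(String[] args) throws Exception {
    // walEntry is a repeated field, so an empty request is already initialized.
    ReplicateWALEntryRequest request = ReplicateWALEntryRequest.newBuilder().build();
    // Serialize, then parse back through one of the renamed parseFrom overloads.
    ReplicateWALEntryRequest copy =
        ReplicateWALEntryRequest.parseFrom(request.toByteString());
    System.out.println(copy.getWalEntryCount()); // prints 0
  }
}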
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -11674,7 +11494,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11685,12 +11505,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11700,7 +11520,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -11713,18 +11533,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -11752,24 +11572,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -11777,23 +11597,23 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -11864,12 +11684,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; } private void initFields() { @@ -11912,10 +11732,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; boolean result = true; result = result && @@ -11931,41 +11751,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -11974,7 +11794,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11985,12 +11805,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12000,7 +11820,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -12013,18 +11833,18 @@ public final class RegionAdminProtos { } public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -12052,24 +11872,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -12077,23 +11897,23 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest(this); + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -12169,12 +11989,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; } // repeated bytes regionToFlush = 1; @@ -12244,10 +12064,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj; boolean result = true; result = result && getRegionToFlushList() @@ -12269,41 +12089,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static 
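For orientation, a sketch (assumed usage, not from the patch) of the RollWALWriter message pair above: the request carries no fields, while the response returns the names of regions to flush as repeated bytes. The region name below is fabricated for illustration.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse;

public class RollWALWriterSketch {
  public static void main(String[] args) {
    // The request message is empty; every call looks the same on the wire.
    RollWALWriterRequest request = RollWALWriterRequest.getDefaultInstance();
    // A server-side response would list region names to flush; fabricate one here.
    RollWALWriterResponse response = RollWALWriterResponse.newBuilder()
        .addRegionToFlush(ByteString.copyFromUtf8("exampleRegion")) // hypothetical name
        .build();
    System.out.println(request.getSerializedSize());      // 0 bytes of payload
    System.out.println(response.getRegionToFlushCount()); // 1
  }
}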
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -12312,7 +12132,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12323,12 +12143,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12338,7 +12158,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -12351,18 +12171,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -12392,24 +12212,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -12417,8 +12237,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) == 0x00000001)) { regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); @@ -12430,16 +12250,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; if (!other.regionToFlush_.isEmpty()) { if (regionToFlush_.isEmpty()) { regionToFlush_ = other.regionToFlush_; @@ -12581,12 +12401,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; } private int bitField0_; @@ -12674,10 +12494,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj; boolean result = true; result = result && (hasReason() == other.hasReason()); @@ -12702,41 +12522,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -12745,7 +12565,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12756,12 +12576,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12771,7 +12591,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -12784,18 +12604,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -12825,24 +12645,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -12850,8 +12670,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -12864,16 +12684,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this; if (other.hasReason()) { setReason(other.getReason()); } @@ -12993,12 +12813,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; } private void initFields() { @@ -13041,10 +12861,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)) { + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj; boolean result = true; result = result && @@ -13060,41 +12880,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -13103,7 +12923,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13114,12 +12934,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13129,7 +12949,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -13142,18 +12962,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -13181,24 +13001,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -13206,23 +13026,23 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -13293,12 +13113,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; } private 
void initFields() { @@ -13341,10 +13161,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; boolean result = true; result = result && @@ -13360,41 +13180,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -13403,7 
+13223,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13414,12 +13234,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13429,7 +13249,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -13442,18 +13262,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -13481,24 +13301,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -13506,23 +13326,23 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -13598,12 +13418,12 @@ public final class RegionAdminProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; } private int bitField0_; @@ -13676,10 +13496,10 @@ public final class RegionAdminProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj; boolean result = true; result = result && (hasServerName() == other.hasServerName()); @@ -13704,41 +13524,41 @@ public final class RegionAdminProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -13747,7 +13567,7 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13758,12 +13578,12 @@ public final class RegionAdminProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13773,7 +13593,7 @@ public final class RegionAdminProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -13786,18 +13606,18 @@ public final class RegionAdminProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder() 
private Builder() { maybeForceBuilderInitialization(); } @@ -13832,24 +13652,24 @@ public final class RegionAdminProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -13857,8 +13677,8 @@ public final class RegionAdminProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -13875,16 +13695,16 @@ public final class RegionAdminProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; if (other.hasServerName()) { mergeServerName(other.getServerName()); } @@ -14043,169 +13863,169 @@ public final class RegionAdminProtos { // @@protoc_insertion_point(class_scope:GetServerInfoResponse) } - public static abstract class RegionAdminService + public static abstract class AdminService implements com.google.protobuf.Service { - protected RegionAdminService() {} + protected AdminService() {} public interface Interface { public abstract void getRegionInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); public abstract void getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); public abstract void getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void closeRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void 
replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); public abstract void rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); public abstract void getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); public abstract void stopServer( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { - return new RegionAdminService() { + return new AdminService() { @java.lang.Override public void getRegionInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done) { impl.getRegionInfo(controller, request, done); } @java.lang.Override public void getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { impl.getStoreFileList(controller, request, done); } @java.lang.Override public void getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.getOnlineRegion(controller, request, done); } @java.lang.Override public void openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.openRegion(controller, request, done); } @java.lang.Override public void closeRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.closeRegion(controller, request, done); } 
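[Editorial aside, not part of the generated file: a minimal client-side sketch of how the renamed AdminProtos service is meant to be consumed, assuming an RpcChannel and RpcController are already supplied by the surrounding RPC engine (both are placeholders here, not constructed by this patch). It builds an empty GetServerInfoRequest, sends it through the generated Stub, and handles the response in a callback; the helper class and method names are hypothetical.]

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcChannel;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;

    public class AdminServiceStubSketch {
      // Hypothetical entry point: in HBase, 'channel' and 'controller' come from
      // the RPC engine rather than being built by hand.
      public static void callGetServerInfo(RpcChannel channel, RpcController controller) {
        // Bind the generated non-blocking stub to the channel.
        AdminProtos.AdminService.Stub stub = AdminProtos.AdminService.newStub(channel);
        // GetServerInfoRequest carries no fields, so an empty builder suffices.
        AdminProtos.GetServerInfoRequest request =
            AdminProtos.GetServerInfoRequest.newBuilder().build();
        // The stub dispatches through channel.callMethod(...) and invokes the
        // callback with the parsed GetServerInfoResponse.
        stub.getServerInfo(controller, request,
            new RpcCallback<AdminProtos.GetServerInfoResponse>() {
              public void run(AdminProtos.GetServerInfoResponse response) {
                System.out.println("server: " + response.getServerName());
              }
            });
      }
    }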
@java.lang.Override public void flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.flushRegion(controller, request, done); } @java.lang.Override public void splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.splitRegion(controller, request, done); } @java.lang.Override public void compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { impl.compactRegion(controller, request, done); } @java.lang.Override public void replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { impl.replicateWALEntry(controller, request, done); } @java.lang.Override public void rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { impl.rollWALWriter(controller, request, done); } @java.lang.Override public void getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { impl.getServerInfo(controller, request, done); } @java.lang.Override public void stopServer( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { impl.stopServer(controller, request, done); } @@ -14232,29 +14052,29 @@ public final class RegionAdminProtos { } switch(method.getIndex()) { case 0: - return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request); + return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request); case 1: - return impl.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request); + return impl.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)request); case 2: - return impl.getOnlineRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request); + return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request); case 3: - return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request); + return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request); case 4: - return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request); + return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request); case 5: - return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request); + return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request); case 6: - return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request); + return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request); case 7: - return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request); + return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request); case 8: - return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request); + return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request); case 9: - return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request); + return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request); case 10: - return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request); + return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request); case 11: - return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request); + return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -14270,29 +14090,29 @@ public final class RegionAdminProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); case 3: - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -14308,29 +14128,29 @@ public final class RegionAdminProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -14341,68 +14161,68 @@ public final class RegionAdminProtos { public abstract void getRegionInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); public abstract void getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); public abstract void getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void closeRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); public abstract void replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); public abstract void rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); public abstract void getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); public abstract void stopServer( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.getDescriptor().getServices().get(0); + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { @@ -14422,63 +14242,63 @@ public final class RegionAdminProtos { } switch(method.getIndex()) { case 0: - this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 1: - this.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 2: - this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request, - 
com.google.protobuf.RpcUtil.specializeCallback( + this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 3: - this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 4: - this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 5: - this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 6: - this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 7: - this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 8: - this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 9: - this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 10: - this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 11: - this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; default: @@ -14496,29 +14316,29 @@ 
public final class RegionAdminProtos {
      }
      switch(method.getIndex()) {
        case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
        case 1:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance();
        case 2:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
        case 3:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance();
        case 4:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance();
        case 5:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance();
        case 6:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance();
        case 7:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance();
        case 8:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance();
        case 9:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance();
        case 10:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance();
        case 11:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
@@ -14534,29 +14354,29 @@ public final class RegionAdminProtos {
      }
      switch(method.getIndex()) {
        case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
        case 1:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance();
        case 2:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance();
        case 3:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance();
        case 4:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance();
        case 5:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance();
        case 6:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance();
        case 7:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance();
        case 8:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance();
        case 9:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance();
        case 10:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance();
        case 11:
-          return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
@@ -14567,7 +14387,7 @@ public final class RegionAdminProtos {
      return new Stub(channel);
    }
-    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RegionAdminService implements Interface {
+    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
@@ -14580,182 +14400,182 @@ public final class RegionAdminProtos {
      public  void getRegionInfo(
          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse> done) {
+          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
-          org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(),
+          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
-            org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class,
-
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance())); } public void getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance())); } public void getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance())); } public void openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(3), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance())); } public void closeRegion( 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(4), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance())); } public void flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(5), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance())); } public void splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(6), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance())); } public void compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(7), controller, request, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance())); } public void replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(8), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); } public void rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(9), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance())); } public void getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(10), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance())); } public void stopServer( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(11), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance())); } } @@ -14765,64 +14585,64 @@ public final class RegionAdminProtos { } public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) throws com.google.protobuf.ServiceException; - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) throws com.google.protobuf.ServiceException; } @@ -14833,147 +14653,147 @@ public final class RegionAdminProtos { private final com.google.protobuf.BlockingRpcChannel channel; - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getStoreFileList( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(1), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(3), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(4), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(5), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(6), controller, request, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(7), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(8), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(9), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest 
request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(10), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(11), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()); } } @@ -15133,75 +14953,75 @@ public final class RegionAdminProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\021RegionAdmin.proto\032\013hbase.proto\"8\n\024GetR" + - "egionInfoRequest\022 \n\006region\030\001 \002(\0132\020.Regio" + - "nSpecifier\"8\n\025GetRegionInfoResponse\022\037\n\nr" + - "egionInfo\030\001 \002(\0132\013.RegionInfo\"Q\n\027GetStore" + - "FileListRequest\022 \n\006region\030\001 \002(\0132\020.Region" + - "Specifier\022\024\n\014columnFamily\030\002 \003(\014\"-\n\030GetSt" + - "oreFileListResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030" + - "\n\026GetOnlineRegionRequest\":\n\027GetOnlineReg" + - "ionResponse\022\037\n\nregionInfo\030\001 \003(\0132\013.Region" + - "Info\"S\n\021OpenRegionRequest\022 \n\006region\030\001 \003(", - "\0132\020.RegionSpecifier\022\034\n\024versionOfOfflineN" + - "ode\030\002 \001(\r\"\234\001\n\022OpenRegionResponse\022<\n\014open" + - "ingState\030\001 \003(\0162&.OpenRegionResponse.Regi" + - "onOpeningState\"H\n\022RegionOpeningState\022\n\n\006" + - "OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_O" + - "PENING\020\002\"r\n\022CloseRegionRequest\022 \n\006region" + - "\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024versionOfClo" + - "singNode\030\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004" + - "true\"%\n\023CloseRegionResponse\022\016\n\006closed\030\001 " + - "\002(\010\"M\n\022FlushRegionRequest\022 \n\006region\030\001 \002(", - "\0132\020.RegionSpecifier\022\025\n\rifOlderThanTs\030\002 \001" + - "(\004\"=\n\023FlushRegionResponse\022\025\n\rlastFlushTi" + - "me\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegion" + - "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + - "r\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023SplitRegionResp" + - "onse\"G\n\024CompactRegionRequest\022 \n\006region\030\001" + - " 
\002(\0132\020.RegionSpecifier\022\r\n\005major\030\002 \001(\010\"\027\n" + - "\025CompactRegionResponse\"1\n\004UUID\022\024\n\014leastS" + - "igBits\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004\"\301\003\n\010WA" + - "LEntry\022 \n\006walKey\030\001 \002(\0132\020.WALEntry.WALKey", - "\022\037\n\004edit\030\002 \002(\0132\021.WALEntry.WALEdit\032~\n\006WAL" + - "Key\022\031\n\021encodedRegionName\030\001 \002(\014\022\021\n\ttableN" + - "ame\030\002 \002(\014\022\031\n\021logSequenceNumber\030\003 \002(\004\022\021\n\t" + - "writeTime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUI" + - "D\032\361\001\n\007WALEdit\022\033\n\010keyValue\030\001 \003(\0132\t.KeyVal" + - "ue\0222\n\013familyScope\030\002 \003(\0132\035.WALEntry.WALEd" + - "it.FamilyScope\032M\n\013FamilyScope\022\016\n\006family\030" + - "\001 \002(\014\022.\n\tscopeType\030\002 \002(\0162\033.WALEntry.WALE" + - "dit.ScopeType\"F\n\tScopeType\022\033\n\027REPLICATIO" + - "N_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLO", - "BAL\020\001\"7\n\030ReplicateWALEntryRequest\022\033\n\010wal" + - "Entry\030\001 \003(\0132\t.WALEntry\"\033\n\031ReplicateWALEn" + - "tryResponse\"\026\n\024RollWALWriterRequest\".\n\025R" + - "ollWALWriterResponse\022\025\n\rregionToFlush\030\001 " + - "\003(\014\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t" + - "\"\024\n\022StopServerResponse\"\026\n\024GetServerInfoR" + - "equest\"8\n\025GetServerInfoResponse\022\037\n\nserve" + - "rName\030\001 \002(\0132\013.ServerName2\213\006\n\022RegionAdmin" + - "Service\022>\n\rgetRegionInfo\022\025.GetRegionInfo" + - "Request\032\026.GetRegionInfoResponse\022G\n\020getSt", - "oreFileList\022\030.GetStoreFileListRequest\032\031." + - "GetStoreFileListResponse\022D\n\017getOnlineReg" + - "ion\022\027.GetOnlineRegionRequest\032\030.GetOnline" + - "RegionResponse\0225\n\nopenRegion\022\022.OpenRegio" + - "nRequest\032\023.OpenRegionResponse\0228\n\013closeRe" + - "gion\022\023.CloseRegionRequest\032\024.CloseRegionR" + - "esponse\0228\n\013flushRegion\022\023.FlushRegionRequ" + - "est\032\024.FlushRegionResponse\0228\n\013splitRegion" + - "\022\023.SplitRegionRequest\032\024.SplitRegionRespo" + - "nse\022>\n\rcompactRegion\022\025.CompactRegionRequ", - "est\032\026.CompactRegionResponse\022J\n\021replicate" + - "WALEntry\022\031.ReplicateWALEntryRequest\032\032.Re" + - "plicateWALEntryResponse\022>\n\rrollWALWriter" + - "\022\025.RollWALWriterRequest\032\026.RollWALWriterR" + - "esponse\022>\n\rgetServerInfo\022\025.GetServerInfo" + - "Request\032\026.GetServerInfoResponse\0225\n\nstopS" + - "erver\022\022.StopServerRequest\032\023.StopServerRe" + - "sponseBG\n*org.apache.hadoop.hbase.protob" + - "uf.generatedB\021RegionAdminProtosH\001\210\001\001\240\001\001" + "\n\013Admin.proto\032\013hbase.proto\"8\n\024GetRegionI" + + "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\"8\n\025GetRegionInfoResponse\022\037\n\nregionI" + + "nfo\030\001 \002(\0132\013.RegionInfo\"Q\n\027GetStoreFileLi" + + "stRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif" + + "ier\022\024\n\014columnFamily\030\002 \003(\014\"-\n\030GetStoreFil" + + "eListResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetO" + + "nlineRegionRequest\":\n\027GetOnlineRegionRes" + + "ponse\022\037\n\nregionInfo\030\001 \003(\0132\013.RegionInfo\"S" + + "\n\021OpenRegionRequest\022 \n\006region\030\001 \003(\0132\020.Re", + "gionSpecifier\022\034\n\024versionOfOfflineNode\030\002 " + + 
"\001(\r\"\234\001\n\022OpenRegionResponse\022<\n\014openingSta" + + "te\030\001 \003(\0162&.OpenRegionResponse.RegionOpen" + + "ingState\"H\n\022RegionOpeningState\022\n\n\006OPENED" + + "\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_OPENING" + + "\020\002\"r\n\022CloseRegionRequest\022 \n\006region\030\001 \002(\013" + + "2\020.RegionSpecifier\022\034\n\024versionOfClosingNo" + + "de\030\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004true\"%" + + "\n\023CloseRegionResponse\022\016\n\006closed\030\001 \002(\010\"M\n" + + "\022FlushRegionRequest\022 \n\006region\030\001 \002(\0132\020.Re", + "gionSpecifier\022\025\n\rifOlderThanTs\030\002 \001(\004\"=\n\023" + + "FlushRegionResponse\022\025\n\rlastFlushTime\030\001 \002" + + "(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegionReques" + + "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\022\n\ns" + + "plitPoint\030\002 \001(\014\"\025\n\023SplitRegionResponse\"G" + + "\n\024CompactRegionRequest\022 \n\006region\030\001 \002(\0132\020" + + ".RegionSpecifier\022\r\n\005major\030\002 \001(\010\"\027\n\025Compa" + + "ctRegionResponse\"1\n\004UUID\022\024\n\014leastSigBits" + + "\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004\"\266\003\n\010WALEntry" + + "\022 \n\006walKey\030\001 \002(\0132\020.WALEntry.WALKey\022\037\n\004ed", + "it\030\002 \002(\0132\021.WALEntry.WALEdit\032~\n\006WALKey\022\031\n" + + "\021encodedRegionName\030\001 \002(\014\022\021\n\ttableName\030\002 " + + "\002(\014\022\031\n\021logSequenceNumber\030\003 \002(\004\022\021\n\twriteT" + + "ime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUID\032\346\001\n\007" + + "WALEdit\022\020\n\010keyValue\030\001 \003(\014\0222\n\013familyScope" + + "\030\002 \003(\0132\035.WALEntry.WALEdit.FamilyScope\032M\n" + + "\013FamilyScope\022\016\n\006family\030\001 \002(\014\022.\n\tscopeTyp" + + "e\030\002 \002(\0162\033.WALEntry.WALEdit.ScopeType\"F\n\t" + + "ScopeType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034" + + "\n\030REPLICATION_SCOPE_GLOBAL\020\001\"7\n\030Replicat", + "eWALEntryRequest\022\033\n\010walEntry\030\001 \003(\0132\t.WAL" + + "Entry\"\033\n\031ReplicateWALEntryResponse\"\026\n\024Ro" + + "llWALWriterRequest\".\n\025RollWALWriterRespo" + + "nse\022\025\n\rregionToFlush\030\001 \003(\014\"#\n\021StopServer" + + "Request\022\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerRes" + + "ponse\"\026\n\024GetServerInfoRequest\"8\n\025GetServ" + + "erInfoResponse\022\037\n\nserverName\030\001 \002(\0132\013.Ser" + + "verName2\205\006\n\014AdminService\022>\n\rgetRegionInf" + + "o\022\025.GetRegionInfoRequest\032\026.GetRegionInfo" + + "Response\022G\n\020getStoreFileList\022\030.GetStoreF", + "ileListRequest\032\031.GetStoreFileListRespons" + + "e\022D\n\017getOnlineRegion\022\027.GetOnlineRegionRe" + + "quest\032\030.GetOnlineRegionResponse\0225\n\nopenR" + + "egion\022\022.OpenRegionRequest\032\023.OpenRegionRe" + + "sponse\0228\n\013closeRegion\022\023.CloseRegionReque" + + "st\032\024.CloseRegionResponse\0228\n\013flushRegion\022" + + "\023.FlushRegionRequest\032\024.FlushRegionRespon" + + "se\0228\n\013splitRegion\022\023.SplitRegionRequest\032\024" + + ".SplitRegionResponse\022>\n\rcompactRegion\022\025." 
+ + "CompactRegionRequest\032\026.CompactRegionResp", + "onse\022J\n\021replicateWALEntry\022\031.ReplicateWAL" + + "EntryRequest\032\032.ReplicateWALEntryResponse" + + "\022>\n\rrollWALWriter\022\025.RollWALWriterRequest" + + "\032\026.RollWALWriterResponse\022>\n\rgetServerInf" + + "o\022\025.GetServerInfoRequest\032\026.GetServerInfo" + + "Response\0225\n\nstopServer\022\022.StopServerReque" + + "st\032\023.StopServerResponseBA\n*org.apache.ha" + + "doop.hbase.protobuf.generatedB\013AdminProt" + + "osH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -15214,232 +15034,232 @@ public final class RegionAdminProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRegionInfoRequest_descriptor, new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); internal_static_GetRegionInfoResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_GetRegionInfoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRegionInfoResponse_descriptor, new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); internal_static_GetStoreFileListRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_GetStoreFileListRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetStoreFileListRequest_descriptor, new java.lang.String[] { "Region", "ColumnFamily", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.Builder.class); internal_static_GetStoreFileListResponse_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_GetStoreFileListResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetStoreFileListResponse_descriptor, new java.lang.String[] { "StoreFile", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.Builder.class); internal_static_GetOnlineRegionRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_GetOnlineRegionRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetOnlineRegionRequest_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); internal_static_GetOnlineRegionResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_GetOnlineRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetOnlineRegionResponse_descriptor, new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); internal_static_OpenRegionRequest_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_OpenRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OpenRegionRequest_descriptor, new java.lang.String[] { "Region", "VersionOfOfflineNode", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); internal_static_OpenRegionResponse_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_OpenRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_OpenRegionResponse_descriptor, new java.lang.String[] { "OpeningState", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); internal_static_CloseRegionRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_CloseRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloseRegionRequest_descriptor, new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); internal_static_CloseRegionResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_CloseRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CloseRegionResponse_descriptor, new java.lang.String[] { "Closed", }, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); internal_static_FlushRegionRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_FlushRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FlushRegionRequest_descriptor, new java.lang.String[] { "Region", "IfOlderThanTs", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); internal_static_FlushRegionResponse_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_FlushRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_FlushRegionResponse_descriptor, new java.lang.String[] { "LastFlushTime", "Flushed", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); internal_static_SplitRegionRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_SplitRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SplitRegionRequest_descriptor, new java.lang.String[] { "Region", "SplitPoint", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); internal_static_SplitRegionResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_SplitRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SplitRegionResponse_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); internal_static_CompactRegionRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_CompactRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CompactRegionRequest_descriptor, new java.lang.String[] { "Region", "Major", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.Builder.class); + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); internal_static_CompactRegionResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_CompactRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CompactRegionResponse_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); internal_static_UUID_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UUID_descriptor, new java.lang.String[] { "LeastSigBits", "MostSigBits", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); internal_static_WALEntry_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_WALEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_descriptor, new java.lang.String[] { "WalKey", "Edit", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); internal_static_WALEntry_WALKey_descriptor = internal_static_WALEntry_descriptor.getNestedTypes().get(0); internal_static_WALEntry_WALKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALKey_descriptor, new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); internal_static_WALEntry_WALEdit_descriptor = internal_static_WALEntry_descriptor.getNestedTypes().get(1); internal_static_WALEntry_WALEdit_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALEdit_descriptor, new java.lang.String[] { "KeyValue", "FamilyScope", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); internal_static_WALEntry_WALEdit_FamilyScope_descriptor = internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); 
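// A minimal usage sketch for the renamed service above (illustrative only,
// not part of the patch). The generated AdminProtos.AdminService keeps the
// standard protobuf 2 stub factories, so a blocking caller only needs the
// RegionAdminProtos -> AdminProtos rename. Assumptions: the HBase RPC layer
// supplies a connected BlockingRpcChannel, and RegionSpecifier from
// hbase.proto carries a (type, value) pair with a REGION_NAME specifier type.
import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

public final class AdminStubSketch {
  /** Fetches region metadata over the renamed AdminService. */
  public static AdminProtos.GetRegionInfoResponse getRegionInfo(
      BlockingRpcChannel channel, byte[] regionName) throws ServiceException {
    AdminProtos.AdminService.BlockingInterface admin =
        AdminProtos.AdminService.newBlockingStub(channel);
    AdminProtos.GetRegionInfoRequest request =
        AdminProtos.GetRegionInfoRequest.newBuilder()
            .setRegion(RegionSpecifier.newBuilder()
                .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)
                .setValue(ByteString.copyFrom(regionName)))
            .build();
    // The generated blocking stub passes the controller straight through,
    // so null is acceptable here.
    return admin.getRegionInfo(null, request);
  }
}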
internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_WALEdit_FamilyScope_descriptor, new java.lang.String[] { "Family", "ScopeType", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); internal_static_ReplicateWALEntryRequest_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryRequest_descriptor, new java.lang.String[] { "WalEntry", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); internal_static_ReplicateWALEntryResponse_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryResponse_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); internal_static_RollWALWriterRequest_descriptor = getDescriptor().getMessageTypes().get(20); internal_static_RollWALWriterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterRequest_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); internal_static_RollWALWriterResponse_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_RollWALWriterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterResponse_descriptor, new java.lang.String[] { "RegionToFlush", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); internal_static_StopServerRequest_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_StopServerRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerRequest_descriptor, new java.lang.String[] { "Reason", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); internal_static_StopServerResponse_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_StopServerResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerResponse_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); internal_static_GetServerInfoRequest_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_GetServerInfoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoRequest_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); internal_static_GetServerInfoResponse_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_GetServerInfoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoResponse_descriptor, new java.lang.String[] { "ServerName", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); return null; } }; diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java similarity index 69% rename from src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java rename to src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java index b36a9c07ba0..de820e27366 100644 --- a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -1,10 +1,10 @@ // Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: RegionClient.proto +// source: Client.proto package org.apache.hadoop.hbase.protobuf.generated; -public final class RegionClientProtos { - private RegionClientProtos() {} +public final class ClientProtos { + private ClientProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } @@ -40,12 +40,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; } private int bitField0_; @@ -138,10 +138,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); @@ -172,41 +172,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) throws java.io.IOException { return 
newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -215,7 +215,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -226,12 +226,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -241,7 +241,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -254,18 +254,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() 
private Builder() { maybeForceBuilderInitialization(); } @@ -297,24 +297,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -322,8 +322,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -341,16 +341,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } @@ -501,491 +501,6 @@ public final class 
RegionClientProtos { // @@protoc_insertion_point(class_scope:Column) } - public interface AttributeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // optional bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class Attribute extends - com.google.protobuf.GeneratedMessage - implements AttributeOrBuilder { - // Use Attribute.newBuilder() to construct. - private Attribute(Builder builder) { - super(builder); - } - private Attribute(boolean noInit) {} - - private static final Attribute defaultInstance; - public static Attribute getDefaultInstance() { - return defaultInstance; - } - - public Attribute getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - name_ = ""; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - 
.computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - 
private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - 
onChanged(); - } - - // optional bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Attribute) - } - - static { - defaultInstance = new Attribute(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Attribute) - } - public interface GetOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -994,33 +509,33 @@ public final class RegionClientProtos { com.google.protobuf.ByteString getRow(); // repeated .Column column = 2; - java.util.List + java.util.List getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); int getColumnCount(); - java.util.List + java.util.List getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - // repeated .Attribute attribute = 3; - java.util.List + // repeated .NameBytesPair attribute = 3; + java.util.List getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); int getAttributeCount(); - java.util.List + java.util.List getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); // optional uint64 lockId = 4; boolean hasLockId(); long getLockId(); - // optional .Parameter filter = 5; + // optional .NameBytesPair filter = 5; boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder(); // optional .TimeRange timeRange = 6; boolean hasTimeRange(); @@ -1055,12 +570,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; } private int bitField0_; @@ -1076,42 
+591,42 @@ public final class RegionClientProtos { // repeated .Column column = 2; public static final int COLUMN_FIELD_NUMBER = 2; - private java.util.List column_; - public java.util.List getColumnList() { + private java.util.List column_; + public java.util.List getColumnList() { return column_; } - public java.util.List + public java.util.List getColumnOrBuilderList() { return column_; } public int getColumnCount() { return column_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } - // repeated .Attribute attribute = 3; + // repeated .NameBytesPair attribute = 3; public static final int ATTRIBUTE_FIELD_NUMBER = 3; - private java.util.List attribute_; - public java.util.List getAttributeList() { + private java.util.List attribute_; + public java.util.List getAttributeList() { return attribute_; } - public java.util.List + public java.util.List getAttributeOrBuilderList() { return attribute_; } public int getAttributeCount() { return attribute_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } @@ -1126,16 +641,16 @@ public final class RegionClientProtos { return lockId_; } - // optional .Parameter filter = 5; + // optional .NameBytesPair filter = 5; public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_; public boolean hasFilter() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { return filter_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { return filter_; } @@ -1177,7 +692,7 @@ public final class RegionClientProtos { column_ = java.util.Collections.emptyList(); attribute_ = java.util.Collections.emptyList(); lockId_ = 0L; - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); maxVersions_ = 1; cacheBlocks_ = true; @@ -1298,10 +813,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)) { + if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); @@ -1383,41 +898,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1426,7 +941,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1437,12 +952,12 @@ public final class RegionClientProtos { return null; } } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1452,7 +967,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1465,18 +980,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1516,7 +1031,7 @@ public final class RegionClientProtos { lockId_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } else { filterBuilder_.clear(); } @@ -1540,24 +1055,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result 
= buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -1565,8 +1080,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -1625,16 +1140,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } @@ -1764,13 +1279,13 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addColumn(subBuilder.buildPartial()); break; } case 26: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addAttribute(subBuilder.buildPartial()); break; @@ -1781,7 +1296,7 @@ public final class RegionClientProtos { break; } case 42: { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); if (hasFilter()) { subBuilder.mergeFrom(getFilter()); } @@ -1839,19 +1354,19 @@ public final class RegionClientProtos { } // repeated .Column column = 2; - private java.util.List column_ = + private java.util.List column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { - column_ = new java.util.ArrayList(column_); + column_ = new java.util.ArrayList(column_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - public java.util.List getColumnList() { + public java.util.List getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); } else { @@ -1865,7 +1380,7 @@ public final class RegionClientProtos { return columnBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); } else { @@ -1873,7 +1388,7 @@ public final class RegionClientProtos { } } public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1887,7 +1402,7 @@ public final class RegionClientProtos { return this; } public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.set(index, builderForValue.build()); @@ -1897,7 +1412,7 @@ public final class RegionClientProtos { } return this; } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1911,7 +1426,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1925,7 +1440,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(builderForValue.build()); @@ -1936,7 +1451,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(index, builderForValue.build()); @@ -1947,7 +1462,7 @@ public final class RegionClientProtos { return this; } public Builder addAllColumn( - java.lang.Iterable values) { + java.lang.Iterable values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); super.addAll(values, column_); @@ -1977,18 +1492,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getColumnOrBuilderList() { if (columnBuilder_ != null) { return columnBuilder_.getMessageOrBuilderList(); @@ -1996,25 +1511,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(column_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } - public java.util.List + public java.util.List getColumnBuilderList() { return getColumnFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), @@ -2024,20 +1539,20 @@ public final class RegionClientProtos { return columnBuilder_; } - // repeated .Attribute attribute = 3; - private java.util.List attribute_ = + // repeated .NameBytesPair attribute = 3; + private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { - attribute_ = new java.util.ArrayList(attribute_); + attribute_ = new java.util.ArrayList(attribute_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - public java.util.List getAttributeList() { + public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { @@ -2051,7 +1566,7 @@ public final class RegionClientProtos { return attributeBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { @@ -2059,7 +1574,7 @@ public final class RegionClientProtos { } } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -2073,7 +1588,7 @@ public final class RegionClientProtos { return this; } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); @@ -2083,7 +1598,7 @@ public final class RegionClientProtos { } return this; } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -2097,7 +1612,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int 
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -2111,7 +1626,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); @@ -2122,7 +1637,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); @@ -2133,7 +1648,7 @@ public final class RegionClientProtos { return this; } public Builder addAllAttribute( - java.lang.Iterable values) { + java.lang.Iterable values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); super.addAll(values, attribute_); @@ -2163,18 +1678,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); @@ -2182,25 +1697,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public java.util.List + public java.util.List getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), @@ -2231,21 +1746,21 @@ public final class RegionClientProtos { return this; } - // optional .Parameter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + // optional .NameBytesPair filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> filterBuilder_; public boolean hasFilter() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { if (filterBuilder_ == null) { return filter_; } else { return filterBuilder_.getMessage(); } } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -2259,7 +1774,7 @@ public final class RegionClientProtos { return this; } public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); @@ -2269,12 +1784,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000010; return this; } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (filterBuilder_ == null) { if (((bitField0_ & 
0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { filter_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } @@ -2287,7 +1802,7 @@ public final class RegionClientProtos { } public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); onChanged(); } else { filterBuilder_.clear(); @@ -2295,12 +1810,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getFilterBuilder() { bitField0_ |= 0x00000010; onChanged(); return getFilterFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { @@ -2308,11 +1823,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( filter_, getParentForChildren(), isClean()); @@ -2467,15 +1982,10 @@ public final class RegionClientProtos { public interface ResultOrBuilder extends com.google.protobuf.MessageOrBuilder { - // repeated .KeyValue value = 1; - java.util.List - getValueList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index); - int getValueCount(); - java.util.List - getValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index); + // repeated bytes keyValueBytes = 1; + java.util.List getKeyValueBytesList(); + int getKeyValueBytesCount(); + com.google.protobuf.ByteString getKeyValueBytes(int index); } public static final class Result extends 
com.google.protobuf.GeneratedMessage @@ -2497,49 +2007,36 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; } - // repeated .KeyValue value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private java.util.List value_; - public java.util.List getValueList() { - return value_; + // repeated bytes keyValueBytes = 1; + public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; + private java.util.List keyValueBytes_; + public java.util.List + getKeyValueBytesList() { + return keyValueBytes_; } - public java.util.List - getValueOrBuilderList() { - return value_; + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); } - public int getValueCount() { - return value_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { - return value_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index) { - return value_.get(index); + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); } private void initFields() { - value_ = java.util.Collections.emptyList(); + keyValueBytes_ = java.util.Collections.emptyList();; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getValueCount(); i++) { - if (!getValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -2547,8 +2044,8 @@ public final class RegionClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - for (int i = 0; i < value_.size(); i++) { - output.writeMessage(1, value_.get(i)); + for (int i = 0; i < keyValueBytes_.size(); i++) { + output.writeBytes(1, keyValueBytes_.get(i)); } getUnknownFields().writeTo(output); } @@ -2559,9 +2056,14 @@ public final class RegionClientProtos { if (size != -1) return size; size = 0; - for (int i = 0; i < value_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_.get(i)); + { + int dataSize = 0; + for (int i = 0; i < keyValueBytes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValueBytes_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueBytesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2580,14 +2082,14 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj; boolean result = true; - result = result && getValueList() - .equals(other.getValueList()); + result = result && getKeyValueBytesList() + .equals(other.getKeyValueBytesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -2597,49 +2099,49 @@ public final class RegionClientProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getValueCount() > 0) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValueList().hashCode(); + if (getKeyValueBytesCount() > 0) { + hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueBytesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2648,7 +2150,7 @@ public final class 
RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2659,12 +2161,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2674,7 +2176,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2687,18 +2189,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2709,7 +2211,6 @@ public final class RegionClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getValueFieldBuilder(); } } private static Builder create() { @@ -2718,12 +2219,8 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); - if (valueBuilder_ == null) { - value_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - valueBuilder_.clear(); - } + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -2733,24 +2230,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - 
return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -2758,70 +2255,44 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); int from_bitField0_ = bitField0_; - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - value_ = java.util.Collections.unmodifiableList(value_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + bitField0_ = (bitField0_ & ~0x00000001); } + result.keyValueBytes_ = keyValueBytes_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) return this; - if (valueBuilder_ == null) { - if (!other.value_.isEmpty()) { - if (value_.isEmpty()) { - value_ = other.value_; - bitField0_ = (bitField0_ & 
~0x00000001); - } else { - ensureValueIsMutable(); - value_.addAll(other.value_); - } - onChanged(); - } - } else { - if (!other.value_.isEmpty()) { - if (valueBuilder_.isEmpty()) { - valueBuilder_.dispose(); - valueBuilder_ = null; - value_ = other.value_; - bitField0_ = (bitField0_ & ~0x00000001); - valueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getValueFieldBuilder() : null; - } else { - valueBuilder_.addAllMessages(other.value_); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; + if (!other.keyValueBytes_.isEmpty()) { + if (keyValueBytes_.isEmpty()) { + keyValueBytes_ = other.keyValueBytes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.addAll(other.keyValueBytes_); } + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - for (int i = 0; i < getValueCount(); i++) { - if (!getValue(i).isInitialized()) { - - return false; - } - } return true; } @@ -2849,9 +2320,8 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addValue(subBuilder.buildPartial()); + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(input.readBytes()); break; } } @@ -2860,191 +2330,56 @@ public final class RegionClientProtos { private int bitField0_; - // repeated .KeyValue value = 1; - private java.util.List value_ = - java.util.Collections.emptyList(); - private void ensureValueIsMutable() { + // repeated bytes keyValueBytes = 1; + private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; + private void ensureKeyValueBytesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - value_ = new java.util.ArrayList(value_); + keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); bitField0_ |= 0x00000001; } } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> valueBuilder_; - - public java.util.List getValueList() { - if (valueBuilder_ == null) { - return java.util.Collections.unmodifiableList(value_); - } else { - return valueBuilder_.getMessageList(); - } + public java.util.List + getKeyValueBytesList() { + return java.util.Collections.unmodifiableList(keyValueBytes_); } - public int getValueCount() { - if (valueBuilder_ == null) { - return value_.size(); - } else { - return valueBuilder_.getCount(); - } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { - if (valueBuilder_ == null) { - return value_.get(index); - } else { - return valueBuilder_.getMessage(index); - } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); } - public Builder setValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - 
value_.set(index, value); - onChanged(); - } else { - valueBuilder_.setMessage(index, value); - } + public Builder setKeyValueBytes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.set(index, value); + onChanged(); return this; } - public Builder setValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.set(index, builderForValue.build()); - onChanged(); - } else { - valueBuilder_.setMessage(index, builderForValue.build()); - } + public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(value); + onChanged(); return this; } - public Builder addValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - value_.add(value); - onChanged(); - } else { - valueBuilder_.addMessage(value); - } + public Builder addAllKeyValueBytes( + java.lang.Iterable values) { + ensureKeyValueBytesIsMutable(); + super.addAll(values, keyValueBytes_); + onChanged(); return this; } - public Builder addValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - value_.add(index, value); - onChanged(); - } else { - valueBuilder_.addMessage(index, value); - } + public Builder clearKeyValueBytes() { + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); return this; } - public Builder addValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.add(builderForValue.build()); - onChanged(); - } else { - valueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.add(index, builderForValue.build()); - onChanged(); - } else { - valueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllValue( - java.lang.Iterable values) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - super.addAll(values, value_); - onChanged(); - } else { - valueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - valueBuilder_.clear(); - } - return this; - } - public Builder removeValue(int index) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.remove(index); - onChanged(); - } else { - valueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getValueBuilder( - int index) { - return getValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index) { - if (valueBuilder_ == null) { - return value_.get(index); 
} else { - return valueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getValueOrBuilderList() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(value_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder() { - return getValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder( - int index) { - return getValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public java.util.List - getValueBuilderList() { - return getValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( - value_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } // @@protoc_insertion_point(builder_scope:Result) } @@ -3067,8 +2402,8 @@ public final class RegionClientProtos { // required .Get get = 2; boolean hasGet(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); // optional bool closestRowBefore = 3; boolean hasClosestRowBefore(); @@ -3098,12 +2433,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; } private int bitField0_; @@ -3122,14 +2457,14 @@ public final class RegionClientProtos { // required .Get get = 2; public static final int GET_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { return get_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder 
getGetOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { return get_; } @@ -3155,7 +2490,7 @@ public final class RegionClientProtos { private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); closestRowBefore_ = false; existenceOnly_ = false; } @@ -3241,10 +2576,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -3296,41 +2631,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) 
.buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -3339,7 +2674,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3350,12 +2685,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3365,7 +2700,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -3378,18 +2713,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3417,7 +2752,7 @@ public final class RegionClientProtos { } bitField0_ = (bitField0_ & ~0x00000001); if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + get_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); } else { getBuilder_.clear(); } @@ -3435,24 +2770,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3460,8 +2795,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -3494,16 +2829,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -3573,7 +2908,7 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); if (hasGet()) { subBuilder.mergeFrom(getGet()); } @@ -3688,20 +3023,20 @@ public final class RegionClientProtos { } // required .Get get = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> getBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; public boolean hasGet() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { if (getBuilder_ == null) { return get_; } else { return getBuilder_.getMessage(); } } - public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { + public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -3715,7 +3050,7 @@ public final class RegionClientProtos { return this; } public Builder setGet( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { if (getBuilder_ == null) { get_ = builderForValue.build(); onChanged(); @@ -3725,12 +3060,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000002; return this; } - public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { + public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { if (getBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - get_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) { + get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { get_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); } else { get_ = value; } @@ -3743,7 +3078,7 @@ public final class RegionClientProtos { } public Builder clearGet() { if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + get_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); onChanged(); } else { getBuilder_.clear(); @@ -3751,12 +3086,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder getGetBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { bitField0_ |= 0x00000002; onChanged(); return getGetFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { if (getBuilder_ != null) { return getBuilder_.getMessageOrBuilder(); } else { @@ -3764,11 +3099,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getGetFieldBuilder() { if (getBuilder_ == null) { getBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( get_, getParentForChildren(), isClean()); @@ -3835,8 +3170,8 @@ public final class RegionClientProtos { // optional .Result result = 1; boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); // optional bool exists = 2; boolean hasExists(); @@ -3862,25 +3197,25 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; } private int bitField0_; // optional .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { return result_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { return result_; } @@ -3895,7 +3230,7 @@ public final class RegionClientProtos { } private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); exists_ = false; } private byte memoizedIsInitialized = -1; @@ -3903,12 +3238,6 @@ public final class RegionClientProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (hasResult()) { - if (!getResult().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -3956,10 +3285,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj; boolean result = true; result = result && (hasResult() == other.hasResult()); @@ -3993,41 +3322,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(java.io.InputStream input) + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -4036,7 +3365,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4047,12 +3376,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4062,7 +3391,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -4075,18 +3404,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -4107,7 +3436,7 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } else { resultBuilder_.clear(); } @@ -4123,24 +3452,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -4148,8 +3477,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -4170,16 +3499,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; if (other.hasResult()) { mergeResult(other.getResult()); } @@ -4191,12 +3520,6 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - if (hasResult()) { - if (!getResult().isInitialized()) { - - return false; - } - } return true; } @@ -4224,7 +3547,7 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); if (hasResult()) { subBuilder.mergeFrom(getResult()); } @@ -4244,20 +3567,20 @@ public final class RegionClientProtos { private int bitField0_; // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { return result_; } else { return resultBuilder_.getMessage(); } } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -4271,7 +3594,7 @@ public final class RegionClientProtos { return this; } public Builder setResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { result_ = builderForValue.build(); onChanged(); @@ -4281,12 +3604,12 @@ 
public final class RegionClientProtos { bitField0_ |= 0x00000001; return this; } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); } else { result_ = value; } @@ -4299,7 +3622,7 @@ public final class RegionClientProtos { } public Builder clearResult() { if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); onChanged(); } else { resultBuilder_.clear(); @@ -4307,12 +3630,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResultFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { @@ -4320,11 +3643,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( result_, getParentForChildren(), isClean()); @@ -4382,15 +3705,12 @@ public final class RegionClientProtos { // required .Condition.CompareType compareType = 4; boolean hasCompareType(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType(); - // required .Condition.Comparator comparator = 5; + // required .NameBytesPair comparator = 5; boolean 
hasComparator(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator(); - - // optional bytes value = 6; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder(); } public static final class Condition extends com.google.protobuf.GeneratedMessage @@ -4412,12 +3732,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; } public enum CompareType @@ -4477,7 +3797,7 @@ public final class RegionClientProtos { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(0); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor().getEnumTypes().get(0); } private static final CompareType[] VALUES = { @@ -4504,93 +3824,6 @@ public final class RegionClientProtos { // @@protoc_insertion_point(enum_scope:Condition.CompareType) } - public enum Comparator - implements com.google.protobuf.ProtocolMessageEnum { - BINARY_COMPARATOR(0, 0), - BINARY_PREFIX_COMPARATOR(1, 1), - BIT_AND_COMPARATOR(2, 2), - BIT_OR_COMPARATOR(3, 3), - BIT_XOR_COMPARATOR(4, 4), - NULL_COMPARATOR(5, 5), - REGEX_STRING_COMPARATOR(6, 6), - SUBSTRING_COMPARATOR(7, 7), - ; - - public static final int BINARY_COMPARATOR_VALUE = 0; - public static final int BINARY_PREFIX_COMPARATOR_VALUE = 1; - public static final int BIT_AND_COMPARATOR_VALUE = 2; - public static final int BIT_OR_COMPARATOR_VALUE = 3; - public static final int BIT_XOR_COMPARATOR_VALUE = 4; - public static final int NULL_COMPARATOR_VALUE = 5; - public static final int REGEX_STRING_COMPARATOR_VALUE = 6; - public static final int SUBSTRING_COMPARATOR_VALUE = 7; - - - public final int getNumber() { return value; } - - public static Comparator valueOf(int value) { - switch (value) { - case 0: return BINARY_COMPARATOR; - case 1: return BINARY_PREFIX_COMPARATOR; - case 2: return BIT_AND_COMPARATOR; - case 3: return BIT_OR_COMPARATOR; - case 4: return BIT_XOR_COMPARATOR; - case 5: return NULL_COMPARATOR; - case 6: return REGEX_STRING_COMPARATOR; - case 7: return SUBSTRING_COMPARATOR; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Comparator findValueByNumber(int number) { - return Comparator.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - 
getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(1); - } - - private static final Comparator[] VALUES = { - BINARY_COMPARATOR, BINARY_PREFIX_COMPARATOR, BIT_AND_COMPARATOR, BIT_OR_COMPARATOR, BIT_XOR_COMPARATOR, NULL_COMPARATOR, REGEX_STRING_COMPARATOR, SUBSTRING_COMPARATOR, - }; - - public static Comparator valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Comparator(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Condition.Comparator) - } - private int bitField0_; // required bytes row = 1; public static final int ROW_FIELD_NUMBER = 1; @@ -4624,41 +3857,33 @@ public final class RegionClientProtos { // required .Condition.CompareType compareType = 4; public static final int COMPARETYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType compareType_; public boolean hasCompareType() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType() { return compareType_; } - // required .Condition.Comparator comparator = 5; + // required .NameBytesPair comparator = 5; public static final int COMPARATOR_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair comparator_; public boolean hasComparator() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator() { return comparator_; } - - // optional bytes value = 6; - public static final int VALUE_FIELD_NUMBER = 6; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public com.google.protobuf.ByteString getValue() { - return value_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder() { + return comparator_; } private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; family_ = com.google.protobuf.ByteString.EMPTY; qualifier_ = com.google.protobuf.ByteString.EMPTY; - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - value_ = com.google.protobuf.ByteString.EMPTY; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } private byte 
memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -4685,6 +3910,10 @@ public final class RegionClientProtos { memoizedIsInitialized = 0; return false; } + if (!getComparator().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -4705,10 +3934,7 @@ public final class RegionClientProtos { output.writeEnum(4, compareType_.getNumber()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeEnum(5, comparator_.getNumber()); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBytes(6, value_); + output.writeMessage(5, comparator_); } getUnknownFields().writeTo(output); } @@ -4737,11 +3963,7 @@ public final class RegionClientProtos { } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(5, comparator_.getNumber()); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, value_); + .computeMessageSize(5, comparator_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -4760,10 +3982,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); @@ -4788,13 +4010,8 @@ public final class RegionClientProtos { } result = result && (hasComparator() == other.hasComparator()); if (hasComparator()) { - result = result && - (getComparator() == other.getComparator()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); + result = result && getComparator() + .equals(other.getComparator()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -4823,51 +4040,47 @@ public final class RegionClientProtos { } if (hasComparator()) { hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getComparator()); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); + hash = (53 * hash) + getComparator().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -4876,7 +4089,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4887,12 +4100,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -4902,7 +4115,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -4915,18 +4128,18 @@ public final class RegionClientProtos { } public static final class Builder extends 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -4937,6 +4150,7 @@ public final class RegionClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparatorFieldBuilder(); } } private static Builder create() { @@ -4951,12 +4165,14 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); qualifier_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; bitField0_ = (bitField0_ & ~0x00000008); - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + comparatorBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000010); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000020); return this; } @@ -4966,24 +4182,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -4991,8 +4207,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -5014,27 +4230,27 @@ public final class RegionClientProtos { if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } - result.comparator_ = comparator_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; + if (comparatorBuilder_ == null) { + result.comparator_ = comparator_; + } else { + result.comparator_ = comparatorBuilder_.build(); } - result.value_ = value_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } @@ -5048,10 +4264,7 @@ public final class RegionClientProtos { setCompareType(other.getCompareType()); } if (other.hasComparator()) { - setComparator(other.getComparator()); - } - if (other.hasValue()) { - setValue(other.getValue()); + mergeComparator(other.getComparator()); } this.mergeUnknownFields(other.getUnknownFields()); return this; @@ -5078,6 +4291,10 @@ public final class RegionClientProtos { return false; } + if (!getComparator().isInitialized()) { + + return false; + } return true; } @@ -5121,7 +4338,7 @@ public final class RegionClientProtos { } case 32: { int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.valueOf(rawValue); + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { @@ -5130,20 +4347,13 @@ public final class RegionClientProtos { } break; } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - comparator_ = value; + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasComparator()) { + subBuilder.mergeFrom(getComparator()); } - break; - } - case 50: { - bitField0_ |= 0x00000020; - value_ = input.readBytes(); + input.readMessage(subBuilder, extensionRegistry); + setComparator(subBuilder.buildPartial()); break; } } @@ -5225,14 +4435,14 @@ public final class RegionClientProtos { } // required .Condition.CompareType compareType = 4; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; public boolean hasCompareType() { return ((bitField0_ & 0x00000008) == 0x00000008); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType() { return compareType_; } - public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value) { + public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType value) { if (value == null) { throw new NullPointerException(); } @@ -5243,57 +4453,99 @@ public final class RegionClientProtos { } public Builder clearCompareType() { bitField0_ = (bitField0_ & ~0x00000008); - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; onChanged(); return this; } - // required .Condition.Comparator comparator = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + // required .NameBytesPair comparator = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> comparatorBuilder_; public boolean hasComparator() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { - return comparator_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator() { + if (comparatorBuilder_ == null) { + return comparator_; + } else { + return comparatorBuilder_.getMessage(); + } } - public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value) { - if (value == null) { - throw new NullPointerException(); + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (comparatorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparator_ = value; + onChanged(); + } else { + comparatorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setComparator( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (comparatorBuilder_ == null) { + comparator_ = builderForValue.build(); + onChanged(); + } else { + comparatorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (comparatorBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + comparator_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + comparator_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(comparator_).mergeFrom(value).buildPartial(); + } else { + comparator_ = value; + } + onChanged(); + } else { + comparatorBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; - comparator_ = value; - onChanged(); return this; } public Builder clearComparator() { + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + comparatorBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000010); - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - onChanged(); return this; } - - // optional bytes value = 6; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - value_ = value; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getComparatorBuilder() { + bitField0_ |= 0x00000010; onChanged(); - return this; + return getComparatorFieldBuilder().getBuilder(); } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000020); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder() { + if (comparatorBuilder_ != null) { + return comparatorBuilder_.getMessageOrBuilder(); + } else { + return comparator_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getComparatorFieldBuilder() { + if (comparatorBuilder_ == null) { + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + comparator_, + getParentForChildren(), + isClean()); + comparator_ = null; + } + return comparatorBuilder_; } // @@protoc_insertion_point(builder_scope:Condition) @@ -5316,26 +4568,26 @@ public final class RegionClientProtos { // required .Mutate.MutateType mutateType = 2; boolean hasMutateType(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType(); // repeated .Mutate.ColumnValue columnValue = 3; - java.util.List + java.util.List getColumnValueList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index); int getColumnValueCount(); - java.util.List + java.util.List getColumnValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( int index); - // repeated .Attribute attribute = 4; - java.util.List + // repeated .NameBytesPair attribute = 4; + java.util.List getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); int getAttributeCount(); - java.util.List + java.util.List getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); // optional uint64 timestamp = 5; @@ -5375,12 +4627,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; } public enum MutateType @@ -5389,16 +4641,12 @@ public final class RegionClientProtos { INCREMENT(1, 1), PUT(2, 2), DELETE(3, 3), - DELETE_COLUMN(4, 4), - DELETE_FAMILY(5, 5), ; public static final int APPEND_VALUE = 0; public static final int INCREMENT_VALUE = 1; public static final int PUT_VALUE = 2; public static final int DELETE_VALUE = 3; - public static final int DELETE_COLUMN_VALUE = 4; - public static final int DELETE_FAMILY_VALUE = 5; public final int getNumber() { return value; } @@ -5409,8 +4657,6 @@ public final class RegionClientProtos { case 1: return INCREMENT; case 2: return PUT; case 3: 
return DELETE; - case 4: return DELETE_COLUMN; - case 5: return DELETE_FAMILY; default: return null; } } @@ -5437,11 +4683,11 @@ public final class RegionClientProtos { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); } private static final MutateType[] VALUES = { - APPEND, INCREMENT, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, + APPEND, INCREMENT, PUT, DELETE, }; public static MutateType valueOf( @@ -5464,6 +4710,78 @@ public final class RegionClientProtos { // @@protoc_insertion_point(enum_scope:Mutate.MutateType) } + public enum DeleteType + implements com.google.protobuf.ProtocolMessageEnum { + DELETE_ONE_VERSION(0, 0), + DELETE_MULTIPLE_VERSIONS(1, 1), + DELETE_FAMILY(2, 2), + ; + + public static final int DELETE_ONE_VERSION_VALUE = 0; + public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; + public static final int DELETE_FAMILY_VALUE = 2; + + + public final int getNumber() { return value; } + + public static DeleteType valueOf(int value) { + switch (value) { + case 0: return DELETE_ONE_VERSION; + case 1: return DELETE_MULTIPLE_VERSIONS; + case 2: return DELETE_FAMILY; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DeleteType findValueByNumber(int number) { + return DeleteType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(1); + } + + private static final DeleteType[] VALUES = { + DELETE_ONE_VERSION, DELETE_MULTIPLE_VERSIONS, DELETE_FAMILY, + }; + + public static DeleteType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private DeleteType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.DeleteType) + } + public interface ColumnValueOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -5472,18 +4790,14 @@ public final class RegionClientProtos { com.google.protobuf.ByteString getFamily(); // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - java.util.List + java.util.List getQualifierValueList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); int getQualifierValueCount(); - java.util.List + java.util.List getQualifierValueOrBuilderList(); - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index); - - // optional uint64 timestamp = 3; - boolean hasTimestamp(); - long getTimestamp(); } public static final class ColumnValue extends com.google.protobuf.GeneratedMessage @@ -5505,18 +4819,18 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; } public interface QualifierValueOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bytes qualifier = 1; + // optional bytes qualifier = 1; boolean hasQualifier(); com.google.protobuf.ByteString getQualifier(); @@ -5527,6 +4841,10 @@ public final class RegionClientProtos { // optional uint64 timestamp = 3; boolean hasTimestamp(); long getTimestamp(); + + // optional .Mutate.DeleteType deleteType = 4; + boolean hasDeleteType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType(); } public static final class QualifierValue extends com.google.protobuf.GeneratedMessage @@ -5548,16 +4866,16 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; } private int bitField0_; - // required bytes qualifier = 1; + // optional bytes qualifier = 1; public static final int QUALIFIER_FIELD_NUMBER = 1; private com.google.protobuf.ByteString qualifier_; public boolean hasQualifier() { @@ -5587,20 +4905,27 @@ public final class RegionClientProtos { return timestamp_; } + // optional .Mutate.DeleteType deleteType = 4; + public static final int DELETETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + private void initFields() { qualifier_ = com.google.protobuf.ByteString.EMPTY; value_ = com.google.protobuf.ByteString.EMPTY; timestamp_ = 0L; + deleteType_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasQualifier()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -5617,6 +4942,9 @@ public final class RegionClientProtos { if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, timestamp_); } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, deleteType_.getNumber()); + } getUnknownFields().writeTo(output); } @@ -5638,6 +4966,10 @@ public final class RegionClientProtos { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, timestamp_); } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, deleteType_.getNumber()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -5655,10 +4987,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) obj; boolean result = true; result = result && (hasQualifier() == other.hasQualifier()); @@ -5676,6 +5008,11 @@ public final class RegionClientProtos { result = result && (getTimestamp() == other.getTimestamp()); } + result = result && (hasDeleteType() == other.hasDeleteType()); + if (hasDeleteType()) { + result = result && + (getDeleteType() == other.getDeleteType()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -5697,45 +5034,49 @@ public final class RegionClientProtos { hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; hash = (53 * hash) + hashLong(getTimestamp()); } + if (hasDeleteType()) { + hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getDeleteType()); + } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -5744,7 +5085,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -5755,12 +5096,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -5770,7 +5111,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -5783,18 +5124,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -5819,6 +5160,8 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); timestamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + bitField0_ = (bitField0_ & ~0x00000008); return this; } @@ -5828,24 +5171,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return 
result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -5853,8 +5196,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -5869,22 +5212,26 @@ public final class RegionClientProtos { to_bitField0_ |= 0x00000004; } result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.deleteType_ = deleteType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; if (other.hasQualifier()) { setQualifier(other.getQualifier()); } @@ -5894,15 +5241,14 @@ public final class RegionClientProtos { if (other.hasTimestamp()) { setTimestamp(other.getTimestamp()); } + if (other.hasDeleteType()) { + setDeleteType(other.getDeleteType()); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasQualifier()) { - - return false; - } return true; } @@ -5944,13 +5290,24 @@ public final class RegionClientProtos { timestamp_ = input.readUInt64(); break; } + case 32: { + int rawValue = input.readEnum(); + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + deleteType_ = value; + } + break; + } } } } private int bitField0_; - // required bytes qualifier = 1; + // optional bytes qualifier = 1; private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; public boolean hasQualifier() { return ((bitField0_ & 0x00000001) == 0x00000001); @@ -6019,6 +5376,30 @@ public final class RegionClientProtos { return this; } + // optional .Mutate.DeleteType deleteType = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + deleteType_ = value; + onChanged(); + return this; + } + public Builder clearDeleteType() { + bitField0_ = (bitField0_ & ~0x00000008); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) } @@ -6043,39 +5424,28 @@ public final class RegionClientProtos { // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; - private java.util.List qualifierValue_; - public java.util.List getQualifierValueList() { + private java.util.List qualifierValue_; + public java.util.List getQualifierValueList() { return qualifierValue_; } - public java.util.List + public java.util.List getQualifierValueOrBuilderList() { return qualifierValue_; } public int getQualifierValueCount() { return qualifierValue_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { return qualifierValue_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { return qualifierValue_.get(index); } - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getTimestamp() { - return timestamp_; - } - private void initFields() { family_ = com.google.protobuf.ByteString.EMPTY; qualifierValue_ = java.util.Collections.emptyList(); - timestamp_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -6086,12 +5456,6 @@ public final class RegionClientProtos { memoizedIsInitialized = 0; return false; } - for (int i = 0; i < 
getQualifierValueCount(); i++) { - if (!getQualifierValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -6105,9 +5469,6 @@ public final class RegionClientProtos { for (int i = 0; i < qualifierValue_.size(); i++) { output.writeMessage(2, qualifierValue_.get(i)); } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(3, timestamp_); - } getUnknownFields().writeTo(output); } @@ -6125,10 +5486,6 @@ public final class RegionClientProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, qualifierValue_.get(i)); } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -6146,10 +5503,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); @@ -6159,11 +5516,6 @@ public final class RegionClientProtos { } result = result && getQualifierValueList() .equals(other.getQualifierValueList()); - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -6181,49 +5533,45 @@ public final class RegionClientProtos { hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; hash = (53 * hash) + getQualifierValueList().hashCode(); } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -6232,7 +5580,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -6243,12 +5591,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -6258,7 +5606,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -6271,18 +5619,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder { + implements 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -6310,8 +5658,6 @@ public final class RegionClientProtos { } else { qualifierValueBuilder_.clear(); } - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -6321,24 +5667,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -6346,8 +5692,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue(this); + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -6363,26 +5709,22 @@ public final class RegionClientProtos { } else { result.qualifierValue_ = qualifierValueBuilder_.build(); } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.timestamp_ = timestamp_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } @@ -6412,9 +5754,6 @@ public final class RegionClientProtos { } } } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -6424,12 +5763,6 @@ public final class RegionClientProtos { return false; } - for (int i = 0; i < getQualifierValueCount(); i++) { - if (!getQualifierValue(i).isInitialized()) { - - return false; - } - } return true; } @@ -6462,16 +5795,11 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addQualifierValue(subBuilder.buildPartial()); break; } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } } } } @@ -6503,19 +5831,19 @@ public final class RegionClientProtos { } // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - private java.util.List qualifierValue_ = + private java.util.List qualifierValue_ = java.util.Collections.emptyList(); private void ensureQualifierValueIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { - qualifierValue_ = new java.util.ArrayList(qualifierValue_); + qualifierValue_ = new java.util.ArrayList(qualifierValue_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; - public java.util.List getQualifierValueList() { + public java.util.List getQualifierValueList() { if (qualifierValueBuilder_ == null) { return java.util.Collections.unmodifiableList(qualifierValue_); } else { @@ -6529,7 +5857,7 @@ public final class RegionClientProtos { return qualifierValueBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { if (qualifierValueBuilder_ == null) { return qualifierValue_.get(index); } else { @@ -6537,7 +5865,7 @@ public final class RegionClientProtos { } } public Builder setQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -6551,7 +5879,7 @@ public final class RegionClientProtos { return this; } public Builder setQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.set(index, builderForValue.build()); @@ -6561,7 +5889,7 @@ public final class RegionClientProtos { } return this; } - public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -6575,7 +5903,7 @@ public final class RegionClientProtos { return this; } public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { if (qualifierValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -6589,7 +5917,7 @@ public final class RegionClientProtos { return this; } public Builder addQualifierValue( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); 
qualifierValue_.add(builderForValue.build()); @@ -6600,7 +5928,7 @@ public final class RegionClientProtos { return this; } public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); qualifierValue_.add(index, builderForValue.build()); @@ -6611,7 +5939,7 @@ public final class RegionClientProtos { return this; } public Builder addAllQualifierValue( - java.lang.Iterable values) { + java.lang.Iterable values) { if (qualifierValueBuilder_ == null) { ensureQualifierValueIsMutable(); super.addAll(values, qualifierValue_); @@ -6641,18 +5969,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( int index) { return getQualifierValueFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( int index) { if (qualifierValueBuilder_ == null) { return qualifierValue_.get(index); } else { return qualifierValueBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getQualifierValueOrBuilderList() { if (qualifierValueBuilder_ != null) { return qualifierValueBuilder_.getMessageOrBuilderList(); @@ -6660,25 +5988,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(qualifierValue_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { return getQualifierValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( int index) { return getQualifierValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); } - public java.util.List + public java.util.List getQualifierValueBuilderList() { return getQualifierValueFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> getQualifierValueFieldBuilder() { if (qualifierValueBuilder_ == null) { qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( qualifierValue_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), @@ -6688,27 +6016,6 @@ public final class RegionClientProtos { return qualifierValueBuilder_; } - // optional uint64 timestamp = 3; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) } @@ -6733,52 +6040,52 @@ public final class RegionClientProtos { // required .Mutate.MutateType mutateType = 2; public static final int MUTATETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_; public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { return mutateType_; } // repeated .Mutate.ColumnValue columnValue = 3; public static final int COLUMNVALUE_FIELD_NUMBER = 3; - private java.util.List columnValue_; - public java.util.List getColumnValueList() { + private java.util.List columnValue_; + public java.util.List getColumnValueList() { return columnValue_; } - public java.util.List + public java.util.List getColumnValueOrBuilderList() { return columnValue_; } public int getColumnValueCount() { return columnValue_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { return columnValue_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { return 
columnValue_.get(index); } - // repeated .Attribute attribute = 4; + // repeated .NameBytesPair attribute = 4; public static final int ATTRIBUTE_FIELD_NUMBER = 4; - private java.util.List attribute_; - public java.util.List getAttributeList() { + private java.util.List attribute_; + public java.util.List getAttributeList() { return attribute_; } - public java.util.List + public java.util.List getAttributeOrBuilderList() { return attribute_; } public int getAttributeCount() { return attribute_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } @@ -6828,7 +6135,7 @@ public final class RegionClientProtos { private void initFields() { row_ = com.google.protobuf.ByteString.EMPTY; - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; columnValue_ = java.util.Collections.emptyList(); attribute_ = java.util.Collections.emptyList(); timestamp_ = 0L; @@ -6950,10 +6257,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) obj; boolean result = true; result = result && (hasRow() == other.hasRow()); @@ -7035,41 +6342,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -7078,7 +6385,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7089,12 +6396,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7104,7 +6411,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -7117,18 +6424,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -7152,7 +6459,7 @@ public final class RegionClientProtos { super.clear(); row_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; bitField0_ = (bitField0_ & ~0x00000002); if (columnValueBuilder_ == null) { columnValue_ = java.util.Collections.emptyList(); @@ -7187,24 +6494,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -7212,8 +6519,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -7268,16 +6575,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } @@ -7406,7 +6713,7 @@ public final class RegionClientProtos { } case 16: { int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.valueOf(rawValue); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { @@ -7416,13 +6723,13 @@ public final class RegionClientProtos { break; } case 26: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addColumnValue(subBuilder.buildPartial()); break; } case 34: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addAttribute(subBuilder.buildPartial()); break; @@ -7482,14 +6789,14 @@ public final class RegionClientProtos { } // required .Mutate.MutateType mutateType = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; public boolean hasMutateType() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { return mutateType_; } - public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value) { + public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value) { if (value == null) { throw new 
NullPointerException(); } @@ -7500,25 +6807,25 @@ public final class RegionClientProtos { } public Builder clearMutateType() { bitField0_ = (bitField0_ & ~0x00000002); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; onChanged(); return this; } // repeated .Mutate.ColumnValue columnValue = 3; - private java.util.List columnValue_ = + private java.util.List columnValue_ = java.util.Collections.emptyList(); private void ensureColumnValueIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { - columnValue_ = new java.util.ArrayList(columnValue_); + columnValue_ = new java.util.ArrayList(columnValue_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; - public java.util.List getColumnValueList() { + public java.util.List getColumnValueList() { if (columnValueBuilder_ == null) { return java.util.Collections.unmodifiableList(columnValue_); } else { @@ -7532,7 +6839,7 @@ public final class RegionClientProtos { return columnValueBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { if (columnValueBuilder_ == null) { return columnValue_.get(index); } else { @@ -7540,7 +6847,7 @@ public final class RegionClientProtos { } } public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7554,7 +6861,7 @@ public final class RegionClientProtos { return this; } public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.set(index, builderForValue.build()); @@ -7564,7 +6871,7 @@ public final class RegionClientProtos { } return this; } - public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7578,7 +6885,7 @@ public final class RegionClientProtos { return this; } public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { if (columnValueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7592,7 +6899,7 @@ public final class RegionClientProtos { return this; } public Builder addColumnValue( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.add(builderForValue.build()); @@ -7603,7 +6910,7 @@ public final class RegionClientProtos { return this; } public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); columnValue_.add(index, builderForValue.build()); @@ -7614,7 +6921,7 @@ public final class RegionClientProtos { return this; } public Builder addAllColumnValue( - java.lang.Iterable values) { + java.lang.Iterable values) { if (columnValueBuilder_ == null) { ensureColumnValueIsMutable(); super.addAll(values, columnValue_); @@ -7644,18 +6951,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( int index) { return getColumnValueFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( int index) { if (columnValueBuilder_ == null) { return columnValue_.get(index); } else { return columnValueBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getColumnValueOrBuilderList() { if (columnValueBuilder_ != null) { return columnValueBuilder_.getMessageOrBuilderList(); @@ -7663,25 +6970,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(columnValue_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { return getColumnValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( int index) { return getColumnValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); } - public java.util.List + public java.util.List getColumnValueBuilderList() { return 
getColumnValueFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> getColumnValueFieldBuilder() { if (columnValueBuilder_ == null) { columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder>( columnValue_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), @@ -7691,20 +6998,20 @@ public final class RegionClientProtos { return columnValueBuilder_; } - // repeated .Attribute attribute = 4; - private java.util.List attribute_ = + // repeated .NameBytesPair attribute = 4; + private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { - attribute_ = new java.util.ArrayList(attribute_); + attribute_ = new java.util.ArrayList(attribute_); bitField0_ |= 0x00000008; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - public java.util.List getAttributeList() { + public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { @@ -7718,7 +7025,7 @@ public final class RegionClientProtos { return attributeBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { @@ -7726,7 +7033,7 @@ public final class RegionClientProtos { } } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7740,7 +7047,7 @@ public final class RegionClientProtos { return this; } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder 
builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); @@ -7750,7 +7057,7 @@ public final class RegionClientProtos { } return this; } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7764,7 +7071,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -7778,7 +7085,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); @@ -7789,7 +7096,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); @@ -7800,7 +7107,7 @@ public final class RegionClientProtos { return this; } public Builder addAllAttribute( - java.lang.Iterable values) { + java.lang.Iterable values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); super.addAll(values, attribute_); @@ -7830,18 +7137,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); @@ -7849,25 +7156,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public java.util.List + public java.util.List getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000008) == 0x00000008), getParentForChildren(), @@ -8051,13 +7358,13 @@ public final class RegionClientProtos { // required .Mutate mutate = 2; boolean hasMutate(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); // optional .Condition condition = 3; boolean hasCondition(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); } public static final class MutateRequest extends com.google.protobuf.GeneratedMessage @@ -8079,12 +7386,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; } private int bitField0_; @@ -8103,34 +7410,34 @@ public final class RegionClientProtos { // required .Mutate mutate = 2; public static final int MUTATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; public boolean hasMutate() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { return mutate_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { return mutate_; } // optional .Condition condition = 3; public static final int CONDITION_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { return condition_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { return condition_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -8213,10 +7520,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -8259,41 +7566,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } 
- public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -8302,7 +7609,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -8313,12 +7620,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -8328,7 +7635,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } 
public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -8341,18 +7648,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -8381,13 +7688,13 @@ public final class RegionClientProtos { } bitField0_ = (bitField0_ & ~0x00000001); if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); } else { mutateBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); } else { conditionBuilder_.clear(); } @@ -8401,24 +7708,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -8426,8 +7733,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -8460,16 +7767,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -8542,7 +7849,7 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); if (hasMutate()) { subBuilder.mergeFrom(getMutate()); } @@ -8551,7 +7858,7 @@ public final class RegionClientProtos { break; } case 26: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(); if (hasCondition()) { subBuilder.mergeFrom(getCondition()); } @@ -8656,20 +7963,20 @@ public final class RegionClientProtos { } // required .Mutate mutate = 2; - private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> mutateBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; public boolean hasMutate() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { if (mutateBuilder_ == null) { return mutate_; } else { return mutateBuilder_.getMessage(); } } - public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { + public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { if (mutateBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -8683,7 +7990,7 @@ public final class RegionClientProtos { return this; } public Builder setMutate( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { if (mutateBuilder_ == null) { mutate_ = builderForValue.build(); onChanged(); @@ -8693,12 +8000,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000002; return this; } - public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { + public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { if (mutateBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - mutate_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) { + mutate_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { mutate_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); } else { mutate_ = value; } @@ -8711,7 +8018,7 @@ public final class RegionClientProtos { } public Builder clearMutate() { if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); onChanged(); } else { mutateBuilder_.clear(); @@ -8719,12 +8026,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder getMutateBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMutateFieldBuilder().getBuilder(); } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { if (mutateBuilder_ != null) { return mutateBuilder_.getMessageOrBuilder(); } else { @@ -8732,11 +8039,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> getMutateFieldBuilder() { if (mutateBuilder_ == null) { mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( mutate_, getParentForChildren(), isClean()); @@ -8746,20 +8053,20 @@ public final class RegionClientProtos { } // optional .Condition condition = 3; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> conditionBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; public boolean hasCondition() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { if (conditionBuilder_ == null) { return condition_; } else { return conditionBuilder_.getMessage(); } } - public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { + public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -8773,7 +8080,7 @@ public final class RegionClientProtos { return this; } public Builder setCondition( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { if (conditionBuilder_ == null) { condition_ = builderForValue.build(); onChanged(); @@ -8783,12 +8090,12 @@ 
public final class RegionClientProtos { bitField0_ |= 0x00000004; return this; } - public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { + public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { if (conditionBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && - condition_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) { + condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { condition_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); } else { condition_ = value; } @@ -8801,7 +8108,7 @@ public final class RegionClientProtos { } public Builder clearCondition() { if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); onChanged(); } else { conditionBuilder_.clear(); @@ -8809,12 +8116,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000004); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder getConditionBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { bitField0_ |= 0x00000004; onChanged(); return getConditionFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { if (conditionBuilder_ != null) { return conditionBuilder_.getMessageOrBuilder(); } else { @@ -8822,11 +8129,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder() { if (conditionBuilder_ == null) { conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>( condition_, getParentForChildren(), isClean()); @@ -8851,8 +8158,8 @@ public final class RegionClientProtos { // optional .Result result = 1; boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); // optional bool processed = 2; boolean hasProcessed(); @@ -8878,25 +8185,25 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; } private int bitField0_; // optional .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { return result_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { return result_; } @@ -8911,7 +8218,7 @@ public final class RegionClientProtos { } private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); processed_ = false; } private byte memoizedIsInitialized = -1; @@ -8919,12 +8226,6 @@ public final class RegionClientProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (hasResult()) { - if (!getResult().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -8972,10 +8273,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj; boolean result = true; result = result && (hasResult() == other.hasResult()); @@ -9009,41 +8310,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); 
} - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -9052,7 +8353,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9063,12 +8364,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9078,7 +8379,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return 
Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -9091,18 +8392,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -9123,7 +8424,7 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } else { resultBuilder_.clear(); } @@ -9139,24 +8440,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildParsed() throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -9164,8 +8465,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -9186,16 +8487,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this; if (other.hasResult()) { mergeResult(other.getResult()); } @@ -9207,12 +8508,6 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - if (hasResult()) { - if (!getResult().isInitialized()) { - - return false; - } - } return true; } @@ -9240,7 +8535,7 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); if (hasResult()) { subBuilder.mergeFrom(getResult()); } @@ -9260,20 +8555,20 @@ public final class RegionClientProtos { private int bitField0_; // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; public boolean hasResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { if (resultBuilder_ == null) { return result_; } else { return resultBuilder_.getMessage(); } } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -9287,7 +8582,7 @@ public final class RegionClientProtos { return this; } public Builder setResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { result_ = builderForValue.build(); onChanged(); @@ -9297,12 +8592,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000001; return this; } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { result_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); } else { result_ = value; } @@ -9315,7 +8610,7 @@ public final class RegionClientProtos { } public Builder clearResult() { if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); onChanged(); } else { resultBuilder_.clear(); @@ -9323,12 +8618,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { bitField0_ |= 0x00000001; onChanged(); return getResultFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilder(); } else { @@ -9336,11 +8631,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( result_, getParentForChildren(), isClean()); @@ -9385,23 +8680,23 @@ public final class RegionClientProtos { extends com.google.protobuf.MessageOrBuilder { // repeated .Column column = 1; - java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column> + java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); int getColumnCount(); - java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index); - // repeated .Attribute attribute = 2; - java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute> + // repeated .NameBytesPair attribute = 2; + java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); int getAttributeCount(); - java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index); // optional bytes startRow = 3; boolean hasStartRow(); com.google.protobuf.ByteString getStartRow(); // optional bytes stopRow = 4; boolean hasStopRow(); com.google.protobuf.ByteString getStopRow(); - // optional .Parameter filter = 5; + // optional .NameBytesPair filter = 5; boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder(); // optional .TimeRange timeRange = 6; boolean hasTimeRange(); @@ -9430,11 +8725,7 @@ public final class RegionClientProtos { boolean hasCacheBlocks(); boolean getCacheBlocks(); - // optional uint32 rowsToCache = 9; - boolean hasRowsToCache(); - int getRowsToCache(); - - // optional uint32 batchSize = 10; + // optional uint32 batchSize = 9; boolean hasBatchSize(); int getBatchSize(); } @@ -9458,53 +8749,53 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; + 
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; } private int bitField0_; // repeated .Column column = 1; public static final int COLUMN_FIELD_NUMBER = 1; - private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column> column_; - public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column> getColumnList() { + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() { return column_; } - public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList() { return column_; } public int getColumnCount() { return column_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { return column_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { return column_.get(index); } - // repeated .Attribute attribute = 2; + // repeated .NameBytesPair attribute = 2; public static final int ATTRIBUTE_FIELD_NUMBER = 2; - private java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute> attribute_; - public java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute> getAttributeList() { + private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; + public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { return attribute_; } - public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList() { return attribute_; } public int getAttributeCount() { return attribute_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { return attribute_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { return attribute_.get(index); } @@ -9529,16 +8820,16 @@ public final class RegionClientProtos { return stopRow_; } - // optional .Parameter filter = 5; + // optional .NameBytesPair filter = 5; public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_; public boolean hasFilter() { return ((bitField0_ & 0x00000004) == 0x00000004); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { return filter_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { return filter_; } @@ -9575,21 +8866,11 @@ public final class RegionClientProtos { return cacheBlocks_; } - // optional uint32 rowsToCache = 9; - public static final int ROWSTOCACHE_FIELD_NUMBER = 9; - private int rowsToCache_; - public boolean hasRowsToCache() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getRowsToCache() 
{ - return rowsToCache_; - } - - // optional uint32 batchSize = 10; - public static final int BATCHSIZE_FIELD_NUMBER = 10; + // optional uint32 batchSize = 9; + public static final int BATCHSIZE_FIELD_NUMBER = 9; private int batchSize_; public boolean hasBatchSize() { - return ((bitField0_ & 0x00000080) == 0x00000080); + return ((bitField0_ & 0x00000040) == 0x00000040); } public int getBatchSize() { return batchSize_; @@ -9600,11 +8881,10 @@ public final class RegionClientProtos { attribute_ = java.util.Collections.emptyList(); startRow_ = com.google.protobuf.ByteString.EMPTY; stopRow_ = com.google.protobuf.ByteString.EMPTY; - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); maxVersions_ = 1; cacheBlocks_ = true; - rowsToCache_ = 0; batchSize_ = 0; } private byte memoizedIsInitialized = -1; @@ -9662,10 +8942,7 @@ public final class RegionClientProtos { output.writeBool(8, cacheBlocks_); } if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeUInt32(9, rowsToCache_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeUInt32(10, batchSize_); + output.writeUInt32(9, batchSize_); } getUnknownFields().writeTo(output); } @@ -9710,11 +8987,7 @@ public final class RegionClientProtos { } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(9, rowsToCache_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(10, batchSize_); + .computeUInt32Size(9, batchSize_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -9733,10 +9006,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj; boolean result = true; result = result && getColumnList() @@ -9773,11 +9046,6 @@ public final class RegionClientProtos { result = result && (getCacheBlocks() == other.getCacheBlocks()); } - result = result && (hasRowsToCache() == other.hasRowsToCache()); - if (hasRowsToCache()) { - result = result && (getRowsToCache() - == other.getRowsToCache()); - } result = result && (hasBatchSize() == other.hasBatchSize()); if (hasBatchSize()) { result = result && (getBatchSize() @@ -9824,10 +9092,6 @@ public final class RegionClientProtos { hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getCacheBlocks()); } - if (hasRowsToCache()) { - hash = (37 * hash) + ROWSTOCACHE_FIELD_NUMBER; - hash = (53 * hash) + getRowsToCache(); - } if (hasBatchSize()) { hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; hash = (53 * hash) + getBatchSize(); @@ -9836,41 +9100,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -9879,7 +9143,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9890,12 +9154,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -9905,7 +9169,7 @@ public final class RegionClientProtos { 
public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -9918,18 +9182,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -9969,7 +9233,7 @@ public final class RegionClientProtos { stopRow_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } else { filterBuilder_.clear(); } @@ -9984,10 +9248,8 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000040); cacheBlocks_ = true; bitField0_ = (bitField0_ & ~0x00000080); - rowsToCache_ = 0; - bitField0_ = (bitField0_ & ~0x00000100); batchSize_ = 0; - bitField0_ = (bitField0_ & ~0x00000200); + bitField0_ = (bitField0_ & ~0x00000100); return this; } @@ -9997,24 +9259,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } 
return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -10022,8 +9284,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (columnBuilder_ == null) { @@ -10079,10 +9341,6 @@ public final class RegionClientProtos { if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } - result.rowsToCache_ = rowsToCache_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000080; - } result.batchSize_ = batchSize_; result.bitField0_ = to_bitField0_; onBuilt(); @@ -10090,16 +9348,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; if (columnBuilder_ == null) { if (!other.column_.isEmpty()) { if (column_.isEmpty()) { @@ -10170,9 +9428,6 @@ public final class RegionClientProtos { if (other.hasCacheBlocks()) { setCacheBlocks(other.getCacheBlocks()); } - if (other.hasRowsToCache()) { - setRowsToCache(other.getRowsToCache()); - } if (other.hasBatchSize()) { setBatchSize(other.getBatchSize()); } @@ -10226,13 +9481,13 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addColumn(subBuilder.buildPartial()); break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addAttribute(subBuilder.buildPartial()); break; @@ -10248,7 +9503,7 @@ public final class RegionClientProtos { break; } case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); if (hasFilter()) { subBuilder.mergeFrom(getFilter()); } @@ -10277,11 +9532,6 @@ public final class RegionClientProtos { } case 72: { bitField0_ |= 0x00000100; - rowsToCache_ = input.readUInt32(); - break; - } - case 80: { - bitField0_ |= 0x00000200; batchSize_ = input.readUInt32(); break; } @@ -10292,19 +9542,19 @@ public final class RegionClientProtos { private int bitField0_; // repeated .Column column = 1; - private java.util.List column_ = + private java.util.List column_ = java.util.Collections.emptyList(); private void ensureColumnIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - column_ = new java.util.ArrayList(column_); + column_ = new java.util.ArrayList(column_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; - public java.util.List getColumnList() { + public java.util.List getColumnList() { if (columnBuilder_ == null) { return java.util.Collections.unmodifiableList(column_); } else { @@ -10318,7 +9568,7 @@ public final class RegionClientProtos { return columnBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { if (columnBuilder_ == null) { return column_.get(index); } else { @@ -10326,7 +9576,7 @@ public final class RegionClientProtos { } } public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10340,7 +9590,7 @@ public final class RegionClientProtos { return this; } public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.set(index, builderForValue.build()); @@ -10350,7 +9600,7 @@ public final class RegionClientProtos { } return this; } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + public 
Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10364,7 +9614,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { if (columnBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10378,7 +9628,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(builderForValue.build()); @@ -10389,7 +9639,7 @@ public final class RegionClientProtos { return this; } public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { if (columnBuilder_ == null) { ensureColumnIsMutable(); column_.add(index, builderForValue.build()); @@ -10400,7 +9650,7 @@ public final class RegionClientProtos { return this; } public Builder addAllColumn( - java.lang.Iterable values) { + java.lang.Iterable values) { if (columnBuilder_ == null) { ensureColumnIsMutable(); super.addAll(values, column_); @@ -10430,18 +9680,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( int index) { return getColumnFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( int index) { if (columnBuilder_ == null) { return column_.get(index); } else { return columnBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getColumnOrBuilderList() { if (columnBuilder_ != null) { return columnBuilder_.getMessageOrBuilderList(); @@ -10449,25 +9699,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(column_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( int index) { return getColumnFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); } - public java.util.List + public java.util.List getColumnBuilderList() { return 
getColumnFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder() { if (columnBuilder_ == null) { columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( column_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), @@ -10477,20 +9727,20 @@ public final class RegionClientProtos { return columnBuilder_; } - // repeated .Attribute attribute = 2; - private java.util.List attribute_ = + // repeated .NameBytesPair attribute = 2; + private java.util.List attribute_ = java.util.Collections.emptyList(); private void ensureAttributeIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { - attribute_ = new java.util.ArrayList(attribute_); + attribute_ = new java.util.ArrayList(attribute_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; - public java.util.List getAttributeList() { + public java.util.List getAttributeList() { if (attributeBuilder_ == null) { return java.util.Collections.unmodifiableList(attribute_); } else { @@ -10504,7 +9754,7 @@ public final class RegionClientProtos { return attributeBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { @@ -10512,7 +9762,7 @@ public final class RegionClientProtos { } } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10526,7 +9776,7 @@ public final class RegionClientProtos { return this; } public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { 
ensureAttributeIsMutable(); attribute_.set(index, builderForValue.build()); @@ -10536,7 +9786,7 @@ public final class RegionClientProtos { } return this; } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10550,7 +9800,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (attributeBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10564,7 +9814,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(builderForValue.build()); @@ -10575,7 +9825,7 @@ public final class RegionClientProtos { return this; } public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); attribute_.add(index, builderForValue.build()); @@ -10586,7 +9836,7 @@ public final class RegionClientProtos { return this; } public Builder addAllAttribute( - java.lang.Iterable values) { + java.lang.Iterable values) { if (attributeBuilder_ == null) { ensureAttributeIsMutable(); super.addAll(values, attribute_); @@ -10616,18 +9866,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( int index) { return getAttributeFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( int index) { if (attributeBuilder_ == null) { return attribute_.get(index); } else { return attributeBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getAttributeOrBuilderList() { if (attributeBuilder_ != null) { return attributeBuilder_.getMessageOrBuilderList(); @@ -10635,25 +9885,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(attribute_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( int index) { return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public java.util.List + public java.util.List getAttributeBuilderList() { return getAttributeFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder() { if (attributeBuilder_ == null) { attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( attribute_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), @@ -10711,21 +9961,21 @@ public final class RegionClientProtos { return this; } - // optional .Parameter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + // optional .NameBytesPair filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> filterBuilder_; public boolean hasFilter() { return ((bitField0_ & 0x00000010) == 0x00000010); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { if (filterBuilder_ == null) { return filter_; } else { return filterBuilder_.getMessage(); } } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (filterBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -10739,7 +9989,7 @@ public final class RegionClientProtos { return this; } public Builder setFilter( - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (filterBuilder_ == null) { filter_ = builderForValue.build(); onChanged(); @@ -10749,12 +9999,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000010; return this; } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (filterBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { filter_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(filter_).mergeFrom(value).buildPartial(); } else { filter_ = value; } @@ -10767,7 +10017,7 @@ public final class RegionClientProtos { } public Builder clearFilter() { if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); onChanged(); } else { filterBuilder_.clear(); @@ -10775,12 +10025,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000010); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getFilterBuilder() { bitField0_ |= 0x00000010; onChanged(); return getFilterFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { if (filterBuilder_ != null) { return filterBuilder_.getMessageOrBuilder(); } else { @@ -10788,11 +10038,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getFilterFieldBuilder() { if (filterBuilder_ == null) { filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( filter_, getParentForChildren(), isClean()); @@ -10933,43 +10183,22 @@ public final 
class RegionClientProtos { return this; } - // optional uint32 rowsToCache = 9; - private int rowsToCache_ ; - public boolean hasRowsToCache() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public int getRowsToCache() { - return rowsToCache_; - } - public Builder setRowsToCache(int value) { - bitField0_ |= 0x00000100; - rowsToCache_ = value; - onChanged(); - return this; - } - public Builder clearRowsToCache() { - bitField0_ = (bitField0_ & ~0x00000100); - rowsToCache_ = 0; - onChanged(); - return this; - } - - // optional uint32 batchSize = 10; + // optional uint32 batchSize = 9; private int batchSize_ ; public boolean hasBatchSize() { - return ((bitField0_ & 0x00000200) == 0x00000200); + return ((bitField0_ & 0x00000100) == 0x00000100); } public int getBatchSize() { return batchSize_; } public Builder setBatchSize(int value) { - bitField0_ |= 0x00000200; + bitField0_ |= 0x00000100; batchSize_ = value; onChanged(); return this; } public Builder clearBatchSize() { - bitField0_ = (bitField0_ & ~0x00000200); + bitField0_ = (bitField0_ & ~0x00000100); batchSize_ = 0; onChanged(); return this; @@ -10989,20 +10218,25 @@ public final class RegionClientProtos { public interface ScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional uint64 scannerId = 1; - boolean hasScannerId(); - long getScannerId(); + // optional .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // optional .Scan scan = 2; boolean hasScan(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - // optional uint32 numberOfRows = 3; + // optional uint64 scannerId = 3; + boolean hasScannerId(); + long getScannerId(); + + // optional uint32 numberOfRows = 4; boolean hasNumberOfRows(); int getNumberOfRows(); - // optional bool closeScanner = 4; + // optional bool closeScanner = 5; boolean hasCloseScanner(); boolean getCloseScanner(); } @@ -11026,61 +10260,75 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; } private int bitField0_; - // optional uint64 scannerId = 1; - public static final int SCANNERID_FIELD_NUMBER = 1; + // optional .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional .Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional uint64 scannerId = 3; + public static final int SCANNERID_FIELD_NUMBER = 3; private long scannerId_; public boolean hasScannerId() { - return ((bitField0_ & 0x00000001) == 0x00000001); + return ((bitField0_ & 0x00000004) == 0x00000004); } public long getScannerId() { return scannerId_; } - // optional .Scan scan = 2; - public static final int SCAN_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { - return scan_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // optional uint32 numberOfRows = 3; - public static final int NUMBEROFROWS_FIELD_NUMBER = 3; + // optional uint32 numberOfRows = 4; + public static final int NUMBEROFROWS_FIELD_NUMBER = 4; private int numberOfRows_; public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000004) == 0x00000004); + return ((bitField0_ & 0x00000008) == 0x00000008); } public int getNumberOfRows() { return numberOfRows_; } - // optional bool closeScanner = 4; - public static final int CLOSESCANNER_FIELD_NUMBER = 4; + // optional bool closeScanner = 5; + public static final int CLOSESCANNER_FIELD_NUMBER = 5; private boolean closeScanner_; public boolean hasCloseScanner() { - return ((bitField0_ & 0x00000008) == 0x00000008); + return ((bitField0_ & 0x00000010) == 0x00000010); } public boolean getCloseScanner() { return closeScanner_; } private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); scannerId_ = 0L; - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); numberOfRows_ = 0; closeScanner_ = false; } @@ -11089,6 +10337,12 @@ public final class RegionClientProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (hasRegion()) { + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } if (hasScan()) { if (!getScan().isInitialized()) { memoizedIsInitialized = 0; @@ -11103,16 +10357,19 @@ public final class RegionClientProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, scannerId_); + output.writeMessage(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, scan_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt32(3, numberOfRows_); + output.writeUInt64(3, scannerId_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(4, closeScanner_); + output.writeUInt32(4, numberOfRows_); + } + if 
(((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, closeScanner_); } getUnknownFields().writeTo(output); } @@ -11125,7 +10382,7 @@ public final class RegionClientProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, scannerId_); + .computeMessageSize(1, region_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream @@ -11133,11 +10390,15 @@ public final class RegionClientProtos { } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(3, numberOfRows_); + .computeUInt64Size(3, scannerId_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, closeScanner_); + .computeUInt32Size(4, numberOfRows_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, closeScanner_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -11156,22 +10417,27 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; boolean result = true; - result = result && (hasScannerId() == other.hasScannerId()); - if (hasScannerId()) { - result = result && (getScannerId() - == other.getScannerId()); + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); } result = result && (hasScan() == other.hasScan()); if (hasScan()) { result = result && getScan() .equals(other.getScan()); } + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } result = result && (hasNumberOfRows() == other.hasNumberOfRows()); if (hasNumberOfRows()) { result = result && (getNumberOfRows() @@ -11191,14 +10457,18 @@ public final class RegionClientProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasScannerId()) { - hash = (37 * hash) + SCANNERID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); } if (hasScan()) { hash = (37 * hash) + SCAN_FIELD_NUMBER; hash = (53 * hash) + getScan().hashCode(); } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } if (hasNumberOfRows()) { hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; hash = (53 * hash) + getNumberOfRows(); @@ -11211,41 +10481,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.ByteString data) throws 
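// ----------------------------------------------------------------------------
// [Editor's note, not part of the generated diff] The hunks above change
// ScanRequest so that a RegionSpecifier leads the message (tag 1), pushing
// scannerId from tag 1 to tag 3 and shifting numberOfRows/closeScanner to
// tags 4/5; writeTo(), getSerializedSize(), equals() and hashCode() are all
// regenerated to match. A sketch of the intended two-phase usage, assuming
// the RegionSpecifier shape added to hbase.proto by this patch:
//
//   // First call: name the region explicitly and carry the Scan spec.
//   ClientProtos.ScanRequest open = ClientProtos.ScanRequest.newBuilder()
//       .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
//           .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME) // assumed enum name
//           .setValue(ByteString.copyFrom(regionName)))  // regionName: byte[]
//       .setScan(scan)
//       .setNumberOfRows(100)
//       .build();
//
//   // Later calls: the scanner id alone identifies the open scanner.
//   ClientProtos.ScanRequest next = ClientProtos.ScanRequest.newBuilder()
//       .setScannerId(scannerId)
//       .setNumberOfRows(100)
//       .build();
// ----------------------------------------------------------------------------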
com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -11254,7 +10524,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11265,12 +10535,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11280,7 +10550,7 @@ public final class 
RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -11293,18 +10563,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -11315,6 +10585,7 @@ public final class RegionClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); getScanFieldBuilder(); } } @@ -11324,18 +10595,24 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); - scannerId_ = 0L; + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000001); if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); } else { scanBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); - numberOfRows_ = 0; + scannerId_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); - closeScanner_ = false; + numberOfRows_ = 0; bitField0_ = (bitField0_ & ~0x00000008); + closeScanner_ = false; + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -11345,24 +10622,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -11370,14 +10647,18 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.scannerId_ = scannerId_; + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } @@ -11389,10 +10670,14 @@ public final class RegionClientProtos { if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } - result.numberOfRows_ = numberOfRows_; + result.scannerId_ = scannerId_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } + result.numberOfRows_ = numberOfRows_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } result.closeScanner_ = closeScanner_; result.bitField0_ = to_bitField0_; onBuilt(); @@ -11400,22 +10685,25 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance()) return this; - if (other.hasScannerId()) { - setScannerId(other.getScannerId()); + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); } if (other.hasScan()) { mergeScan(other.getScan()); } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } if (other.hasNumberOfRows()) { setNumberOfRows(other.getNumberOfRows()); } @@ -11427,6 +10715,12 @@ public final class RegionClientProtos { } public final boolean isInitialized() { + if (hasRegion()) { + if (!getRegion().isInitialized()) { + + return false; + } + } if (hasScan()) { if (!getScan().isInitialized()) { @@ -11459,13 +10753,17 @@ public final class RegionClientProtos { } break; } - case 8: { - bitField0_ |= 0x00000001; - scannerId_ = input.readUInt64(); + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); if (hasScan()) { subBuilder.mergeFrom(getScan()); } @@ -11475,11 +10773,16 @@ public final class RegionClientProtos { } case 24: { bitField0_ |= 0x00000004; - numberOfRows_ = input.readUInt32(); + scannerId_ = input.readUInt64(); break; } case 32: { bitField0_ |= 0x00000008; + numberOfRows_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; closeScanner_ = input.readBool(); break; } @@ -11489,42 +10792,111 @@ public final class RegionClientProtos { private int bitField0_; - // optional uint64 scannerId = 1; - private long scannerId_ ; - public boolean hasScannerId() { + // optional .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public long getScannerId() { - return scannerId_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } } - public Builder setScannerId(long value) { + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } bitField0_ |= 0x00000001; - scannerId_ = value; - onChanged(); return this; } - public Builder clearScannerId() { - bitField0_ = (bitField0_ & ~0x00000001); - scannerId_ = 0L; - onChanged(); + public 
Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; return this; } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } // optional .Scan scan = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> scanBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; public boolean hasScan() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { if (scanBuilder_ == null) { return scan_; } else { return 
scanBuilder_.getMessage(); } } - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -11538,7 +10910,7 @@ public final class RegionClientProtos { return this; } public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { if (scanBuilder_ == null) { scan_ = builderForValue.build(); onChanged(); @@ -11548,12 +10920,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000002; return this; } - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { if (scanBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) { + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { scan_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); } else { scan_ = value; } @@ -11566,7 +10938,7 @@ public final class RegionClientProtos { } public Builder clearScan() { if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); onChanged(); } else { scanBuilder_.clear(); @@ -11574,12 +10946,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder getScanBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { bitField0_ |= 0x00000002; onChanged(); return getScanFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { if (scanBuilder_ != null) { return scanBuilder_.getMessageOrBuilder(); } else { @@ -11587,11 +10959,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { if (scanBuilder_ == null) { scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( scan_, getParentForChildren(), isClean()); @@ -11600,43 +10972,64 @@ public final class RegionClientProtos { return scanBuilder_; } - // optional uint32 numberOfRows = 3; + // optional uint64 scannerId = 3; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000004; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000004); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 numberOfRows = 4; private int numberOfRows_ ; public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000004) == 0x00000004); + return ((bitField0_ & 0x00000008) == 0x00000008); } public int getNumberOfRows() { return numberOfRows_; } public Builder setNumberOfRows(int value) { - bitField0_ |= 0x00000004; + bitField0_ |= 0x00000008; numberOfRows_ = value; onChanged(); return this; } public Builder clearNumberOfRows() { - bitField0_ = (bitField0_ & ~0x00000004); + bitField0_ = (bitField0_ & ~0x00000008); numberOfRows_ = 0; onChanged(); return this; } - // optional bool closeScanner = 4; + // optional bool closeScanner = 5; private boolean closeScanner_ ; public boolean hasCloseScanner() { - return ((bitField0_ & 0x00000008) == 0x00000008); + return ((bitField0_ & 0x00000010) == 0x00000010); } public boolean getCloseScanner() { return closeScanner_; } public Builder setCloseScanner(boolean value) { - bitField0_ |= 0x00000008; + bitField0_ |= 0x00000010; closeScanner_ = value; onChanged(); return this; } public Builder clearCloseScanner() { - bitField0_ = (bitField0_ & ~0x00000008); + bitField0_ = (bitField0_ & ~0x00000010); closeScanner_ = false; onChanged(); return this; @@ -11657,13 +11050,13 @@ public final class RegionClientProtos { extends com.google.protobuf.MessageOrBuilder { // repeated .Result result = 1; - java.util.List + java.util.List getResultList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); int getResultCount(); - java.util.List + java.util.List getResultOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index); // optional uint64 scannerId = 2; @@ -11698,32 +11091,32 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; } private int bitField0_; // repeated .Result result = 1; public static final int RESULT_FIELD_NUMBER = 1; - private java.util.List result_; - public 
java.util.List getResultList() { + private java.util.List result_; + public java.util.List getResultList() { return result_; } - public java.util.List + public java.util.List getResultOrBuilderList() { return result_; } public int getResultCount() { return result_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { return result_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { return result_.get(index); } @@ -11769,12 +11162,6 @@ public final class RegionClientProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } memoizedIsInitialized = 1; return true; } @@ -11836,10 +11223,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; boolean result = true; result = result && getResultList() @@ -11888,41 +11275,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(java.io.InputStream input) + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -11931,7 +11318,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11942,12 +11329,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -11957,7 +11344,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -11970,18 +11357,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -12022,24 +11409,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -12047,8 +11434,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (resultBuilder_ == null) { @@ -12078,16 +11465,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; if (resultBuilder_ == null) { if (!other.result_.isEmpty()) { if (result_.isEmpty()) { @@ -12128,12 +11515,6 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - - return false; - } - } return true; } @@ -12161,7 +11542,7 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addResult(subBuilder.buildPartial()); break; @@ -12188,19 +11569,19 @@ public final class RegionClientProtos { private int bitField0_; // repeated .Result result = 1; - private java.util.List result_ = + private java.util.List result_ = java.util.Collections.emptyList(); private void ensureResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(result_); + result_ = new java.util.ArrayList(result_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; - public java.util.List getResultList() { + public java.util.List getResultList() { if (resultBuilder_ == null) { return java.util.Collections.unmodifiableList(result_); } else { @@ -12214,7 +11595,7 @@ public final class RegionClientProtos { return resultBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { if (resultBuilder_ == null) { return result_.get(index); } else { @@ -12222,7 +11603,7 @@ public final class RegionClientProtos { } } public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -12236,7 +11617,7 @@ public final class RegionClientProtos { return this; } public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { ensureResultIsMutable(); result_.set(index, builderForValue.build()); 
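The hunks above do two things at once: they apply the RegionClientProtos -> ClientProtos rename, and they renumber the scan request's builder fields, inserting a new optional uint64 scannerId at field 3 and pushing numberOfRows to 4 and closeScanner to 5. They also delete the per-result isInitialized() loop from ScanResponse, so partially populated Result messages no longer fail response validation. A minimal sketch of how a caller might drive the renumbered request follows; it is illustrative only, and it assumes the enclosing message is ClientProtos.ScanRequest (the name is implied but never spelled out in these hunks) and that Scan has no required fields:

    // Illustrative sketch, not part of the patch. ScanRequest as the
    // enclosing message name is an assumption; the setter names below
    // (setScan, setScannerId, setNumberOfRows, setCloseScanner) all
    // appear in the generated builder code in the hunks above.
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class ScanRequestSketch {
      public static void main(String[] args) {
        // Opening call: ship the Scan definition and ask for a row batch.
        // (The region field is omitted here for brevity.)
        ClientProtos.ScanRequest open = ClientProtos.ScanRequest.newBuilder()
            .setScan(ClientProtos.Scan.newBuilder().build()) // field 2, unchanged
            .setNumberOfRows(100)                            // was field 3, now 4
            .build();

        // Continuation/close call: address the server-side scanner by id.
        long scannerId = 42L; // in practice taken from the first ScanResponse
        ClientProtos.ScanRequest close = ClientProtos.ScanRequest.newBuilder()
            .setScannerId(scannerId)                         // new optional uint64, field 3
            .setCloseScanner(true)                           // was field 4, now 5
            .build();

        System.out.println(open.hasScannerId());   // false: the field is optional
        System.out.println(close.getCloseScanner());
      }
    }

Carrying scannerId in the request is what lets a single request shape cover the open, next-batch, and close cases of a scan within one RPC method.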
@@ -12246,7 +11627,7 @@ public final class RegionClientProtos { } return this; } - public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -12260,7 +11641,7 @@ public final class RegionClientProtos { return this; } public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -12274,7 +11655,7 @@ public final class RegionClientProtos { return this; } public Builder addResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { ensureResultIsMutable(); result_.add(builderForValue.build()); @@ -12285,7 +11666,7 @@ public final class RegionClientProtos { return this; } public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (resultBuilder_ == null) { ensureResultIsMutable(); result_.add(index, builderForValue.build()); @@ -12296,7 +11677,7 @@ public final class RegionClientProtos { return this; } public Builder addAllResult( - java.lang.Iterable values) { + java.lang.Iterable values) { if (resultBuilder_ == null) { ensureResultIsMutable(); super.addAll(values, result_); @@ -12326,18 +11707,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( int index) { return getResultFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( int index) { if (resultBuilder_ == null) { return result_.get(index); } else { return resultBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getResultOrBuilderList() { if (resultBuilder_ != null) { return resultBuilder_.getMessageOrBuilderList(); @@ -12345,25 +11726,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(result_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { return getResultFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( int index) { return getResultFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); } - public java.util.List + public java.util.List getResultBuilderList() { return getResultFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder() { if (resultBuilder_ == null) { resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( result_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), @@ -12480,12 +11861,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; } private int bitField0_; @@ -12585,10 +11966,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -12619,41 +12000,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -12662,7 +12043,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12673,12 +12054,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -12688,7 +12069,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest 
prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -12701,18 +12082,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -12749,24 +12130,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -12774,8 +12155,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildPartial() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -12797,16 +12178,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -13062,12 +12443,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; } private int bitField0_; @@ -13151,10 +12532,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) obj; boolean result = true; result = result && (hasLockId() == other.hasLockId()); @@ -13188,41 +12569,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -13231,7 +12612,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13242,12 +12623,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13257,7 +12638,7 @@ public final 
class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -13270,18 +12651,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -13313,24 +12694,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -13338,8 +12719,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -13356,16 +12737,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()) return this; if (other.hasLockId()) { setLockId(other.getLockId()); } @@ -13508,12 +12889,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; } private int bitField0_; @@ -13608,10 +12989,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -13645,41 +13026,41 @@ public final class RegionClientProtos { return hash; } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -13688,7 +13069,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13699,12 +13080,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -13714,7 +13095,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -13727,18 +13108,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -13775,24 +13156,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildParsed() + private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -13800,8 +13181,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -13822,16 +13203,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -14046,12 +13427,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; } private void initFields() { @@ -14094,10 +13475,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) obj; boolean result = true; result = result && @@ -14113,41 +13494,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -14156,7 +13537,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -14167,12 +13548,12 @@ public final class RegionClientProtos { return 
null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -14182,7 +13563,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -14195,18 +13576,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -14234,24 +13615,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -14259,23 +13640,23 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -14332,13 +13713,13 @@ public final class RegionClientProtos { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - java.util.List + java.util.List getFamilyPathList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); int getFamilyPathCount(); - java.util.List + java.util.List getFamilyPathOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index); } public static final class BulkLoadHFileRequest extends @@ 
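The hunks above are a mechanical rename of the generated outer class from RegionClientProtos to ClientProtos; the UnlockRowResponse parse/builder surface itself is unchanged. For orientation while reviewing, here is a minimal round-trip sketch of that surface, using only methods visible in these hunks plus the standard com.google.protobuf.Message#toByteString():

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse;

public class UnlockRowResponseRoundTrip {
  public static void main(String[] args) throws Exception {
    // UnlockRowResponse declares no fields, so the built message is empty.
    UnlockRowResponse resp = UnlockRowResponse.newBuilder().build();
    // Serialize, then re-parse through the renamed ClientProtos entry point.
    ByteString data = resp.toByteString();
    UnlockRowResponse reparsed = UnlockRowResponse.parseFrom(data);
    // The generated equals() compares only unknown fields here, so this holds.
    System.out.println(resp.equals(reparsed)); // true
  }
}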
-14361,12 +13742,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; } public interface FamilyPathOrBuilder @@ -14400,12 +13781,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; } private int bitField0_; @@ -14515,10 +13896,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); @@ -14552,41 +13933,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -14595,7 +13976,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -14606,12 +13987,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -14621,7 +14002,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -14634,18 +14015,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -14677,24 +14058,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath 
result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -14702,8 +14083,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -14720,16 +14101,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } @@ -14878,21 +14259,21 @@ public final class RegionClientProtos { // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; public static final int FAMILYPATH_FIELD_NUMBER = 2; - private java.util.List familyPath_; - public java.util.List getFamilyPathList() { + private java.util.List familyPath_; + public java.util.List getFamilyPathList() { return familyPath_; } - public java.util.List + public java.util.List getFamilyPathOrBuilderList() { return familyPath_; } public int getFamilyPathCount() { return familyPath_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { return familyPath_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { return familyPath_.get(index); } @@ -14967,10 +14348,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -15001,41 +14382,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -15044,7 +14425,7 @@ public final class RegionClientProtos { return null; } } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -15055,12 +14436,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -15070,7 +14451,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -15083,18 +14464,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -15136,24 +14517,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -15161,8 +14542,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -15188,16 +14569,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -15282,7 +14663,7 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder 
subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addFamilyPath(subBuilder.buildPartial()); break; @@ -15384,19 +14765,19 @@ public final class RegionClientProtos { } // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - private java.util.List familyPath_ = + private java.util.List familyPath_ = java.util.Collections.emptyList(); private void ensureFamilyPathIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyPath_ = new java.util.ArrayList(familyPath_); + familyPath_ = new java.util.ArrayList(familyPath_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; - public java.util.List getFamilyPathList() { + public java.util.List getFamilyPathList() { if (familyPathBuilder_ == null) { return java.util.Collections.unmodifiableList(familyPath_); } else { @@ -15410,7 +14791,7 @@ public final class RegionClientProtos { return familyPathBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { if (familyPathBuilder_ == null) { return familyPath_.get(index); } else { @@ -15418,7 +14799,7 @@ public final class RegionClientProtos { } } public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -15432,7 +14813,7 @@ public final class RegionClientProtos { return this; } public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.set(index, builderForValue.build()); @@ -15442,7 +14823,7 @@ public final class RegionClientProtos { } return this; } - public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -15456,7 
+14837,7 @@ public final class RegionClientProtos { return this; } public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { if (familyPathBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -15470,7 +14851,7 @@ public final class RegionClientProtos { return this; } public Builder addFamilyPath( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.add(builderForValue.build()); @@ -15481,7 +14862,7 @@ public final class RegionClientProtos { return this; } public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); familyPath_.add(index, builderForValue.build()); @@ -15492,7 +14873,7 @@ public final class RegionClientProtos { return this; } public Builder addAllFamilyPath( - java.lang.Iterable values) { + java.lang.Iterable values) { if (familyPathBuilder_ == null) { ensureFamilyPathIsMutable(); super.addAll(values, familyPath_); @@ -15522,18 +14903,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( int index) { return getFamilyPathFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( int index) { if (familyPathBuilder_ == null) { return familyPath_.get(index); } else { return familyPathBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getFamilyPathOrBuilderList() { if (familyPathBuilder_ != null) { return familyPathBuilder_.getMessageOrBuilderList(); @@ -15541,25 +14922,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(familyPath_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { return getFamilyPathFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( int 
index) { return getFamilyPathFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); } - public java.util.List + public java.util.List getFamilyPathBuilderList() { return getFamilyPathFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> getFamilyPathFieldBuilder() { if (familyPathBuilder_ == null) { familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( familyPath_, ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), @@ -15607,12 +14988,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; } private int bitField0_; @@ -15678,10 +15059,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; boolean result = true; result = result && (hasLoaded() == other.hasLoaded()); @@ -15706,41 +15087,41 @@ public final class RegionClientProtos { return hash; } - public static 
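The BulkLoadHFileRequest hunks above carry the same rename through the repeated-field plumbing: the nested FamilyPath message, the addFamilyPath/addAllFamilyPath builder methods, and the RepeatedFieldBuilder behind them. A hedged construction sketch follows; the field shapes (bytes family and string path on FamilyPath, and the RegionSpecifier message from hbase.proto) are assumed from this patch's .proto files rather than shown verbatim in these hunks:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

public class BulkLoadRequestSketch {
  public static void main(String[] args) {
    // One FamilyPath per (column family, HFile path) pair to bulk load.
    FamilyPath fp = FamilyPath.newBuilder()
        .setFamily(ByteString.copyFromUtf8("cf"))   // assumed: bytes family = 1
        .setPath("/hfiles/cf/hfile1")               // assumed: string path = 2
        .build();

    // RegionSpecifier is the new common way to name the target region.
    RegionSpecifier region = RegionSpecifier.newBuilder()
        .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)
        .setValue(ByteString.copyFromUtf8("someRegionName"))
        .build();

    BulkLoadHFileRequest req = BulkLoadHFileRequest.newBuilder()
        .setRegion(region)    // required .RegionSpecifier region = 1
        .addFamilyPath(fp)    // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2
        .build();

    System.out.println(req.getFamilyPathCount()); // 1
  }
}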
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -15749,7 +15130,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -15760,12 +15141,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -15775,7 +15156,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -15788,18 +15169,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -15829,24 +15210,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -15854,8 +15235,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -15868,16 +15249,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; if (other.hasLoaded()) { setLoaded(other.getLoaded()); } @@ -15959,1018 +15340,6 @@ public final class RegionClientProtos { // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) } - public interface ParameterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string type = 1; - boolean hasType(); - String getType(); - - // optional bytes binaryValue = 2; - boolean hasBinaryValue(); - com.google.protobuf.ByteString getBinaryValue(); - } - public static final class Parameter extends - com.google.protobuf.GeneratedMessage - implements ParameterOrBuilder { - // Use Parameter.newBuilder() to construct. 
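On the response side, the hunks above finish the rename for BulkLoadHFileResponse, whose single field records whether the load succeeded (the hasLoaded()/setLoaded() pair is visible in the mergeFrom hunk; its bool type is assumed from Client.proto). A server reply might be decoded like this:

import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;

public class BulkLoadResponseCheck {
  // wire holds a serialized BulkLoadHFileResponse, e.g. from an RPC reply.
  static boolean loaded(byte[] wire) throws Exception {
    BulkLoadHFileResponse resp = BulkLoadHFileResponse.parseFrom(wire);
    // hasLoaded() guards the field before reading it.
    return resp.hasLoaded() && resp.getLoaded();
  }
}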
- private Parameter(Builder builder) { - super(builder); - } - private Parameter(boolean noInit) {} - - private static final Parameter defaultInstance; - public static Parameter getDefaultInstance() { - return defaultInstance; - } - - public Parameter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; - } - - private int bitField0_; - // required string type = 1; - public static final int TYPE_FIELD_NUMBER = 1; - private java.lang.Object type_; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getType() { - java.lang.Object ref = type_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - type_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getTypeBytes() { - java.lang.Object ref = type_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - type_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes binaryValue = 2; - public static final int BINARYVALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString binaryValue_; - public boolean hasBinaryValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getBinaryValue() { - return binaryValue_; - } - - private void initFields() { - type_ = ""; - binaryValue_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasType()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getTypeBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, binaryValue_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getTypeBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, binaryValue_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - 
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) obj; - - boolean result = true; - result = result && (hasType() == other.hasType()); - if (hasType()) { - result = result && getType() - .equals(other.getType()); - } - result = result && (hasBinaryValue() == other.hasBinaryValue()); - if (hasBinaryValue()) { - result = result && getBinaryValue() - .equals(other.getBinaryValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasType()) { - hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + getType().hashCode(); - } - if (hasBinaryValue()) { - hash = (37 * hash) + BINARYVALUE_FIELD_NUMBER; - hash = (53 * hash) + getBinaryValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - type_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - binaryValue_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return 
result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.type_ = type_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.binaryValue_ = binaryValue_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) return this; - if (other.hasType()) { - setType(other.getType()); - } - if (other.hasBinaryValue()) { - setBinaryValue(other.getBinaryValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasType()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - type_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - binaryValue_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string type = 1; - private java.lang.Object type_ = ""; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getType() { - java.lang.Object ref = type_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - type_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setType(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - type_ = value; - onChanged(); - return this; - } - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000001); - type_ = getDefaultInstance().getType(); - onChanged(); - return this; - } - void setType(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - type_ = value; - onChanged(); - } - - // optional bytes binaryValue = 2; - private com.google.protobuf.ByteString binaryValue_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasBinaryValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getBinaryValue() { - return binaryValue_; - } - public Builder 
setBinaryValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - binaryValue_ = value; - onChanged(); - return this; - } - public Builder clearBinaryValue() { - bitField0_ = (bitField0_ & ~0x00000002); - binaryValue_ = getDefaultInstance().getBinaryValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Parameter) - } - - static { - defaultInstance = new Parameter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Parameter) - } - - public interface PropertyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // required string value = 2; - boolean hasValue(); - String getValue(); - } - public static final class Property extends - com.google.protobuf.GeneratedMessage - implements PropertyOrBuilder { - // Use Property.newBuilder() to construct. - private Property(Builder builder) { - super(builder); - } - private Property(boolean noInit) {} - - private static final Property defaultInstance; - public static Property getDefaultInstance() { - return defaultInstance; - } - - public Property getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private java.lang.Object value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - value_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - name_ = ""; - value_ = ""; - } - private byte memoizedIsInitialized = -1; - public final 
boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getValueBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getValueBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, 
tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // required string value = 2; - private java.lang.Object value_ = ""; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - value_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setValue(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - void setValue(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:Property) - } - - static { - defaultInstance = new Property(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Property) - } - public interface ExecOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -16986,24 +15355,24 @@ public final class RegionClientProtos { boolean hasMethodName(); String getMethodName(); - // repeated .Property property = 4; - java.util.List + // repeated .NameStringPair property = 4; + java.util.List getPropertyList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index); int getPropertyCount(); - java.util.List + java.util.List getPropertyOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( int index); - // repeated .Parameter parameter = 5; - java.util.List + // repeated .NameBytesPair parameter = 5; + java.util.List getParameterList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index); int getParameterCount(); - java.util.List + java.util.List getParameterOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder 
getParameterOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( int index); } public static final class Exec extends @@ -17026,12 +15395,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; } private int bitField0_; @@ -17109,44 +15478,44 @@ public final class RegionClientProtos { } } - // repeated .Property property = 4; + // repeated .NameStringPair property = 4; public static final int PROPERTY_FIELD_NUMBER = 4; - private java.util.List property_; - public java.util.List getPropertyList() { + private java.util.List property_; + public java.util.List getPropertyList() { return property_; } - public java.util.List + public java.util.List getPropertyOrBuilderList() { return property_; } public int getPropertyCount() { return property_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { return property_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( int index) { return property_.get(index); } - // repeated .Parameter parameter = 5; + // repeated .NameBytesPair parameter = 5; public static final int PARAMETER_FIELD_NUMBER = 5; - private java.util.List parameter_; - public java.util.List getParameterList() { + private java.util.List parameter_; + public java.util.List getParameterList() { return parameter_; } - public java.util.List + public java.util.List getParameterOrBuilderList() { return parameter_; } public int getParameterCount() { return parameter_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { return parameter_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( int index) { return parameter_.get(index); } @@ -17255,10 +15624,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) obj; boolean result = true; result = result && 
(hasRow() == other.hasRow()); @@ -17313,41 +15682,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -17356,7 +15725,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -17367,12 +15736,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -17382,7 +15751,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -17395,18 +15764,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -17454,24 +15823,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -17479,8 +15848,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -17519,16 +15888,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) return this; if (other.hasRow()) { setRow(other.getRow()); } @@ -17661,13 +16030,13 @@ public final class RegionClientProtos { break; } case 34: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addProperty(subBuilder.buildPartial()); break; } case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addParameter(subBuilder.buildPartial()); break; @@ -17774,20 +16143,20 @@ public final class RegionClientProtos { onChanged(); } - // repeated .Property property = 4; - private java.util.List property_ = + // repeated .NameStringPair property = 4; + private java.util.List property_ = java.util.Collections.emptyList(); private void ensurePropertyIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { - property_ = new java.util.ArrayList(property_); + property_ = new java.util.ArrayList(property_); bitField0_ |= 0x00000008; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> 
propertyBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> propertyBuilder_; - public java.util.List getPropertyList() { + public java.util.List getPropertyList() { if (propertyBuilder_ == null) { return java.util.Collections.unmodifiableList(property_); } else { @@ -17801,7 +16170,7 @@ public final class RegionClientProtos { return propertyBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { if (propertyBuilder_ == null) { return property_.get(index); } else { @@ -17809,7 +16178,7 @@ public final class RegionClientProtos { } } public Builder setProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (propertyBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -17823,7 +16192,7 @@ public final class RegionClientProtos { return this; } public Builder setProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (propertyBuilder_ == null) { ensurePropertyIsMutable(); property_.set(index, builderForValue.build()); @@ -17833,7 +16202,7 @@ public final class RegionClientProtos { } return this; } - public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (propertyBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -17847,7 +16216,7 @@ public final class RegionClientProtos { return this; } public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { if (propertyBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -17861,7 +16230,7 @@ public final class RegionClientProtos { return this; } public Builder addProperty( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (propertyBuilder_ == null) { ensurePropertyIsMutable(); property_.add(builderForValue.build()); @@ -17872,7 +16241,7 @@ public final class RegionClientProtos { return this; } public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { if (propertyBuilder_ == null) { ensurePropertyIsMutable(); property_.add(index, builderForValue.build()); @@ -17883,7 +16252,7 @@ public final class RegionClientProtos { return this; } public Builder addAllProperty( - java.lang.Iterable values) { + java.lang.Iterable values) { if (propertyBuilder_ == null) { ensurePropertyIsMutable(); super.addAll(values, property_); @@ -17913,18 +16282,18 @@ public final class 
RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder getPropertyBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getPropertyBuilder( int index) { return getPropertyFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( int index) { if (propertyBuilder_ == null) { return property_.get(index); } else { return propertyBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getPropertyOrBuilderList() { if (propertyBuilder_ != null) { return propertyBuilder_.getMessageOrBuilderList(); @@ -17932,25 +16301,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(property_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder() { return getPropertyFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder( int index) { return getPropertyFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); } - public java.util.List + public java.util.List getPropertyBuilderList() { return getPropertyFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getPropertyFieldBuilder() { if (propertyBuilder_ == null) { propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( property_, ((bitField0_ & 0x00000008) == 0x00000008), getParentForChildren(), @@ -17960,20 +16329,20 @@ public final class RegionClientProtos { return propertyBuilder_; } - // repeated .Parameter parameter = 5; - private java.util.List parameter_ = + // repeated .NameBytesPair parameter = 5; + private java.util.List parameter_ = java.util.Collections.emptyList(); private void ensureParameterIsMutable() { 
if (!((bitField0_ & 0x00000010) == 0x00000010)) { - parameter_ = new java.util.ArrayList(parameter_); + parameter_ = new java.util.ArrayList(parameter_); bitField0_ |= 0x00000010; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> parameterBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> parameterBuilder_; - public java.util.List getParameterList() { + public java.util.List getParameterList() { if (parameterBuilder_ == null) { return java.util.Collections.unmodifiableList(parameter_); } else { @@ -17987,7 +16356,7 @@ public final class RegionClientProtos { return parameterBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { if (parameterBuilder_ == null) { return parameter_.get(index); } else { @@ -17995,7 +16364,7 @@ public final class RegionClientProtos { } } public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (parameterBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -18009,7 +16378,7 @@ public final class RegionClientProtos { return this; } public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (parameterBuilder_ == null) { ensureParameterIsMutable(); parameter_.set(index, builderForValue.build()); @@ -18019,7 +16388,7 @@ public final class RegionClientProtos { } return this; } - public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (parameterBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -18033,7 +16402,7 @@ public final class RegionClientProtos { return this; } public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (parameterBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -18047,7 +16416,7 @@ public final class RegionClientProtos { return this; } public Builder addParameter( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (parameterBuilder_ == null) { ensureParameterIsMutable(); parameter_.add(builderForValue.build()); @@ -18058,7 +16427,7 @@ public final class RegionClientProtos { return this; } public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (parameterBuilder_ == null) { ensureParameterIsMutable(); parameter_.add(index, builderForValue.build()); @@ -18069,7 +16438,7 @@ public final class RegionClientProtos { return this; } public Builder addAllParameter( - java.lang.Iterable values) { + java.lang.Iterable values) { if (parameterBuilder_ == null) { ensureParameterIsMutable(); super.addAll(values, parameter_); @@ -18099,18 +16468,18 @@ public final class RegionClientProtos { } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getParameterBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getParameterBuilder( int index) { return getParameterFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( int index) { if (parameterBuilder_ == null) { return parameter_.get(index); } else { return parameterBuilder_.getMessageOrBuilder(index); } } - public java.util.List + public java.util.List getParameterOrBuilderList() { if (parameterBuilder_ != null) { return parameterBuilder_.getMessageOrBuilderList(); @@ -18118,25 +16487,25 @@ public final class RegionClientProtos { return java.util.Collections.unmodifiableList(parameter_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder() { return getParameterFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder( int index) { return getParameterFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public java.util.List + public java.util.List getParameterBuilderList() { return getParameterFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getParameterFieldBuilder() { if (parameterBuilder_ == null) { parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( parameter_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), @@ -18167,8 +16536,8 @@ public final class RegionClientProtos { // required .Exec call = 2; boolean hasCall(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder(); } public static final class ExecCoprocessorRequest extends com.google.protobuf.GeneratedMessage @@ -18190,12 +16559,12 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; } private int bitField0_; @@ -18214,20 +16583,20 @@ public final class RegionClientProtos { // required .Exec call = 2; public static final int CALL_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_; public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { return call_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { return call_; } private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -18297,10 +16666,10 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) obj; boolean result = true; result = result && (hasRegion() == other.hasRegion()); @@ -18334,41 
+16703,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -18377,7 +16746,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -18388,12 +16757,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -18403,7 +16772,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -18416,18 +16785,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -18455,7 +16824,7 @@ public final class RegionClientProtos { } bitField0_ = (bitField0_ & ~0x00000001); if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); } else { callBuilder_.clear(); } @@ -18469,24 +16838,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); } - 
public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -18494,8 +16863,8 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -18520,16 +16889,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; if (other.hasRegion()) { mergeRegion(other.getRegion()); } @@ -18593,7 +16962,7 @@ public final class RegionClientProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); if (hasCall()) { subBuilder.mergeFrom(getCall()); } @@ -18698,20 
+17067,20 @@ public final class RegionClientProtos { } // required .Exec call = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> callBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> callBuilder_; public boolean hasCall() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { if (callBuilder_ == null) { return call_; } else { return callBuilder_.getMessage(); } } - public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { + public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { if (callBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -18725,7 +17094,7 @@ public final class RegionClientProtos { return this; } public Builder setCall( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { if (callBuilder_ == null) { call_ = builderForValue.build(); onChanged(); @@ -18735,12 +17104,12 @@ public final class RegionClientProtos { bitField0_ |= 0x00000002; return this; } - public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { + public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { if (callBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - call_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) { + call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { call_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); } else { call_ = value; } @@ -18753,7 +17122,7 @@ public final class RegionClientProtos { } public Builder clearCall() { if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); onChanged(); } else { callBuilder_.clear(); @@ -18761,12 +17130,12 @@ public final class RegionClientProtos { bitField0_ = (bitField0_ & ~0x00000002); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder getCallBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getCallBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCallFieldBuilder().getBuilder(); } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { if (callBuilder_ != null) { return callBuilder_.getMessageOrBuilder(); } else { @@ -18774,11 +17143,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> getCallFieldBuilder() { if (callBuilder_ == null) { callBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( call_, getParentForChildren(), isClean()); @@ -18801,14 +17170,10 @@ public final class RegionClientProtos { public interface ExecCoprocessorResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bytes regionName = 1; - boolean hasRegionName(); - com.google.protobuf.ByteString getRegionName(); - - // required .Parameter value = 2; + // required .NameBytesPair value = 1; boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); } public static final class ExecCoprocessorResponse extends com.google.protobuf.GeneratedMessage @@ -18830,51 +17195,36 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; } private int bitField0_; - // required bytes regionName = 1; - public static final int REGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString regionName_; - public boolean hasRegionName() { + // required .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - - // 
required .Parameter value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { return value_; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { return value_; } private void initFields() { - regionName_ = com.google.protobuf.ByteString.EMPTY; - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasRegionName()) { - memoizedIsInitialized = 0; - return false; - } if (!hasValue()) { memoizedIsInitialized = 0; return false; @@ -18891,10 +17241,7 @@ public final class RegionClientProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, regionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, value_); + output.writeMessage(1, value_); } getUnknownFields().writeTo(output); } @@ -18907,11 +17254,7 @@ public final class RegionClientProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, regionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, value_); + .computeMessageSize(1, value_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -18930,17 +17273,12 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) obj; boolean result = true; - result = result && (hasRegionName() == other.hasRegionName()); - if (hasRegionName()) { - result = result && getRegionName() - .equals(other.getRegionName()); - } result = result && (hasValue() == other.hasValue()); if (hasValue()) { result = result && getValue() @@ -18955,10 +17293,6 @@ public final class RegionClientProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionName()) { - hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getRegionName().hashCode(); - } if (hasValue()) { hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + getValue().hashCode(); @@ -18967,41 +17301,41 @@ public final 
class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -19010,7 +17344,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -19021,12 +17355,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -19036,7 +17370,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -19049,18 +17383,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -19080,14 +17414,12 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); - regionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } else { valueBuilder_.clear(); } - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -19097,24 +17429,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + 
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -19122,17 +17454,13 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.regionName_ = regionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } if (valueBuilder_ == null) { result.value_ = value_; } else { @@ -19144,19 +17472,16 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; - if (other.hasRegionName()) { - setRegionName(other.getRegionName()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()) 
return this; if (other.hasValue()) { mergeValue(other.getValue()); } @@ -19165,10 +17490,6 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - if (!hasRegionName()) { - - return false; - } if (!hasValue()) { return false; @@ -19204,12 +17525,7 @@ public final class RegionClientProtos { break; } case 10: { - bitField0_ |= 0x00000001; - regionName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); if (hasValue()) { subBuilder.mergeFrom(getValue()); } @@ -19223,45 +17539,21 @@ public final class RegionClientProtos { private int bitField0_; - // required bytes regionName = 1; - private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRegionName() { + // required .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - public Builder setRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - regionName_ = value; - onChanged(); - return this; - } - public Builder clearRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - regionName_ = getDefaultInstance().getRegionName(); - onChanged(); - return this; - } - - // required .Parameter value = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> valueBuilder_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { if (valueBuilder_ == null) { return value_; } else { return valueBuilder_.getMessage(); } } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -19271,26 +17563,26 @@ public final class RegionClientProtos { } else { valueBuilder_.setMessage(value); } - bitField0_ |= 0x00000002; + bitField0_ |= 0x00000001; return this; } public Builder setValue( - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { if (valueBuilder_ == null) { value_ = builderForValue.build(); onChanged(); } else { valueBuilder_.setMessage(builderForValue.build()); } - bitField0_ |= 0x00000002; + bitField0_ |= 0x00000001; return this; } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - value_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { value_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(value_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); } else { value_ = value; } @@ -19298,25 +17590,25 @@ public final class RegionClientProtos { } else { valueBuilder_.mergeFrom(value); } - bitField0_ |= 0x00000002; + bitField0_ |= 0x00000001; return this; } public Builder clearValue() { if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); onChanged(); } else { valueBuilder_.clear(); } - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getValueBuilder() { - bitField0_ |= 0x00000002; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; onChanged(); return getValueFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { @@ -19324,11 +17616,11 @@ public final class RegionClientProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( value_, getParentForChildren(), isClean()); @@ -19348,20 +17640,658 @@ public final class RegionClientProtos { // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) } + public interface ActionResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .NameBytesPair value = 1; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + + // optional .NameBytesPair exception = 2; + boolean hasException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); + } + public static final class ActionResult extends + com.google.protobuf.GeneratedMessage + implements ActionResultOrBuilder { + // Use ActionResult.newBuilder() to construct. + private ActionResult(Builder builder) { + super(builder); + } + private ActionResult(boolean noInit) {} + + private static final ActionResult defaultInstance; + public static ActionResult getDefaultInstance() { + return defaultInstance; + } + + public ActionResult getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + private int bitField0_; + // optional .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + // optional .NameBytesPair exception = 2; + public static final int EXCEPTION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + return exception_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + return exception_; + } + + private void initFields() { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasValue()) { + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + 
memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, exception_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, exception_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasValue()) { + if (!getValue().isInitialized()) { + + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasException()) { + subBuilder.mergeFrom(getException()); + } + input.readMessage(subBuilder, extensionRegistry); + setException(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // optional .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + 
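+      // Editor's note (assumption, inferred from the optional value/exception
+      // pair declared above): for each action in a multi call the server sets
+      // value on success; NameBytesPair is assumed to carry a type name plus
+      // serialized bytes so heterogeneous results can share one wire type.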
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // optional .NameBytesPair exception = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } 
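+      // Editor's sketch (not protoc output): a client consuming an ActionResult
+      // would branch on which optional field is present, e.g.
+      //   if (result.hasException()) { /* surface the named exception */ }
+      //   else if (result.hasValue()) { /* decode the value bytes */ }
+      // How the pair is decoded is left to the converter classes in this patch.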
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ActionResult) + } + + static { + defaultInstance = new ActionResult(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ActionResult) + } + public interface MultiRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - // repeated .Parameter request = 1; - java.util.List - getRequestList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index); - int getRequestCount(); - java.util.List - getRequestOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated .NameBytesPair action = 2; + java.util.List + getActionList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index); + int getActionCount(); + java.util.List + getActionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( int index); - // optional bool atomic = 2; + // optional bool atomic = 3; boolean hasAtomic(); boolean getAtomic(); } @@ -19385,48 +18315,62 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; } private int bitField0_; - // repeated .Parameter request = 1; - public static final int REQUEST_FIELD_NUMBER = 1; - private java.util.List request_; - public java.util.List getRequestList() { - return request_; + // required .RegionSpecifier region = 1; + public static final int 
REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); } - public java.util.List - getRequestOrBuilderList() { - return request_; + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; } - public int getRequestCount() { - return request_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { - return request_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( - int index) { - return request_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; } - // optional bool atomic = 2; - public static final int ATOMIC_FIELD_NUMBER = 2; + // repeated .NameBytesPair action = 2; + public static final int ACTION_FIELD_NUMBER = 2; + private java.util.List action_; + public java.util.List getActionList() { + return action_; + } + public java.util.List + getActionOrBuilderList() { + return action_; + } + public int getActionCount() { + return action_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index) { + return action_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( + int index) { + return action_.get(index); + } + + // optional bool atomic = 3; + public static final int ATOMIC_FIELD_NUMBER = 3; private boolean atomic_; public boolean hasAtomic() { - return ((bitField0_ & 0x00000001) == 0x00000001); + return ((bitField0_ & 0x00000002) == 0x00000002); } public boolean getAtomic() { return atomic_; } private void initFields() { - request_ = java.util.Collections.emptyList(); + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + action_ = java.util.Collections.emptyList(); atomic_ = false; } private byte memoizedIsInitialized = -1; @@ -19434,8 +18378,16 @@ public final class RegionClientProtos { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getRequestCount(); i++) { - if (!getRequest(i).isInitialized()) { + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -19447,11 +18399,14 @@ public final class RegionClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - for (int i = 0; i < request_.size(); i++) { - output.writeMessage(1, request_.get(i)); - } if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(2, atomic_); + output.writeMessage(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + output.writeMessage(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, atomic_); } getUnknownFields().writeTo(output); } @@ -19462,13 +18417,17 @@ public final class RegionClientProtos { if (size != -1) return size; size = 0; - for (int i = 0; i < request_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, request_.get(i)); - } if 
(((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, atomic_); + .computeMessageSize(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, atomic_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -19487,14 +18446,19 @@ public final class RegionClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; boolean result = true; - result = result && getRequestList() - .equals(other.getRequestList()); + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getActionList() + .equals(other.getActionList()); result = result && (hasAtomic() == other.hasAtomic()); if (hasAtomic()) { result = result && (getAtomic() @@ -19509,9 +18473,13 @@ public final class RegionClientProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRequestCount() > 0) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequestList().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getActionCount() > 0) { + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + getActionList().hashCode(); } if (hasAtomic()) { hash = (37 * hash) + ATOMIC_FIELD_NUMBER; @@ -19521,41 +18489,41 @@ public final class RegionClientProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest 
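/*
 * Editor's note: the parseFrom/parseDelimitedFrom overloads in this hunk are the
 * standard generated entry points; only their owning class changes, from
 * RegionClientProtos to ClientProtos. A hedged round-trip sketch (the request
 * instance is assumed built as above):
 *
 *   byte[] bytes = request.toByteArray();
 *   ClientProtos.MultiRequest copy = ClientProtos.MultiRequest.parseFrom(bytes);
 *   // buildParsed() converts a missing required region into an
 *   // InvalidProtocolBufferException, per the isInitialized() check above
 */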
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -19564,7 +18532,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -19575,12 +18543,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -19590,7 +18558,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -19603,18 +18571,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -19625,7 +18593,8 @@ public final class RegionClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRequestFieldBuilder(); + getRegionFieldBuilder(); + getActionFieldBuilder(); } } private static Builder create() { @@ -19634,14 +18603,20 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); - if (requestBuilder_ == null) { - request_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { - requestBuilder_.clear(); + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (actionBuilder_ == null) { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + actionBuilder_.clear(); } atomic_ = false; - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -19651,24 +18626,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( 
result).asInvalidProtocolBufferException(); @@ -19676,22 +18651,30 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; - if (requestBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - request_ = java.util.Collections.unmodifiableList(request_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.request_ = request_; - } else { - result.request_ = requestBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (actionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = java.util.Collections.unmodifiableList(action_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.action_ = action_; + } else { + result.action_ = actionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } result.atomic_ = atomic_; result.bitField0_ = to_bitField0_; onBuilt(); @@ -19699,39 +18682,42 @@ public final class RegionClientProtos { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance()) return this; - if (requestBuilder_ == null) { - if (!other.request_.isEmpty()) { - if (request_.isEmpty()) { - request_ = other.request_; - bitField0_ = (bitField0_ & ~0x00000001); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (actionBuilder_ == null) { + if (!other.action_.isEmpty()) { + if (action_.isEmpty()) { + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); } else { - ensureRequestIsMutable(); - request_.addAll(other.request_); + ensureActionIsMutable(); + action_.addAll(other.action_); } onChanged(); } } else { - if (!other.request_.isEmpty()) { - if (requestBuilder_.isEmpty()) { - requestBuilder_.dispose(); - requestBuilder_ = null; - request_ = other.request_; - bitField0_ = (bitField0_ & ~0x00000001); - requestBuilder_ = + if (!other.action_.isEmpty()) { + if 
(actionBuilder_.isEmpty()) { + actionBuilder_.dispose(); + actionBuilder_ = null; + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); + actionBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getRequestFieldBuilder() : null; + getActionFieldBuilder() : null; } else { - requestBuilder_.addAllMessages(other.request_); + actionBuilder_.addAllMessages(other.action_); } } } @@ -19743,8 +18729,16 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - for (int i = 0; i < getRequestCount(); i++) { - if (!getRequest(i).isInitialized()) { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { return false; } @@ -19776,13 +18770,22 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } input.readMessage(subBuilder, extensionRegistry); - addRequest(subBuilder.buildPartial()); + setRegion(subBuilder.buildPartial()); break; } - case 16: { - bitField0_ |= 0x00000002; + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAction(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; atomic_ = input.readBool(); break; } @@ -19792,208 +18795,298 @@ public final class RegionClientProtos { private int bitField0_; - // repeated .Parameter request = 1; - private java.util.List request_ = + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .NameBytesPair action = 2; + private java.util.List action_ = java.util.Collections.emptyList(); - private void ensureRequestIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - request_ = new java.util.ArrayList(request_); - bitField0_ |= 0x00000001; + private void ensureActionIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = new java.util.ArrayList(action_); + bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> requestBuilder_; + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> actionBuilder_; - public java.util.List getRequestList() { - if (requestBuilder_ == null) { - return java.util.Collections.unmodifiableList(request_); + public java.util.List getActionList() { + if (actionBuilder_ == null) { + return java.util.Collections.unmodifiableList(action_); } else { - return requestBuilder_.getMessageList(); + return actionBuilder_.getMessageList(); } } - public int getRequestCount() { - if (requestBuilder_ == null) { - return request_.size(); + public int getActionCount() { + if (actionBuilder_ == null) 
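/*
 * Editor's note: mergeRegion() above follows the usual protobuf-2 pattern for a
 * singular message field: when a region is already set, the incoming value is
 * merged into it field-by-field via newBuilder(region_).mergeFrom(value);
 * otherwise it is adopted wholesale. A sketch of the observable difference from
 * setRegion() (specA and specB are illustrative):
 *
 *   builder.setRegion(specA);    // replaces any prior region outright
 *   builder.mergeRegion(specB);  // overlays specB's set fields onto specA
 */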
{ + return action_.size(); } else { - return requestBuilder_.getCount(); + return actionBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { - if (requestBuilder_ == null) { - return request_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index) { + if (actionBuilder_ == null) { + return action_.get(index); } else { - return requestBuilder_.getMessage(index); + return actionBuilder_.getMessage(index); } } - public Builder setRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureRequestIsMutable(); - request_.set(index, value); + ensureActionIsMutable(); + action_.set(index, value); onChanged(); } else { - requestBuilder_.setMessage(index, value); + actionBuilder_.setMessage(index, value); } return this; } - public Builder setRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.set(index, builderForValue.build()); + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.set(index, builderForValue.build()); onChanged(); } else { - requestBuilder_.setMessage(index, builderForValue.build()); + actionBuilder_.setMessage(index, builderForValue.build()); } return this; } - public Builder addRequest(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { + public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureRequestIsMutable(); - request_.add(value); + ensureActionIsMutable(); + action_.add(value); onChanged(); } else { - requestBuilder_.addMessage(value); + actionBuilder_.addMessage(value); } return this; } - public Builder addRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureRequestIsMutable(); - request_.add(index, value); + ensureActionIsMutable(); + action_.add(index, value); onChanged(); } else { - requestBuilder_.addMessage(index, value); + actionBuilder_.addMessage(index, value); } return this; } - public Builder addRequest( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.add(builderForValue.build()); + public Builder addAction( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(builderForValue.build()); onChanged(); } else { - requestBuilder_.addMessage(builderForValue.build()); + actionBuilder_.addMessage(builderForValue.build()); } return 
this; } - public Builder addRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.add(index, builderForValue.build()); + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(index, builderForValue.build()); onChanged(); } else { - requestBuilder_.addMessage(index, builderForValue.build()); + actionBuilder_.addMessage(index, builderForValue.build()); } return this; } - public Builder addAllRequest( - java.lang.Iterable values) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - super.addAll(values, request_); + public Builder addAllAction( + java.lang.Iterable values) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + super.addAll(values, action_); onChanged(); } else { - requestBuilder_.addAllMessages(values); + actionBuilder_.addAllMessages(values); } return this; } - public Builder clearRequest() { - if (requestBuilder_ == null) { - request_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); + public Builder clearAction() { + if (actionBuilder_ == null) { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { - requestBuilder_.clear(); + actionBuilder_.clear(); } return this; } - public Builder removeRequest(int index) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.remove(index); + public Builder removeAction(int index) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.remove(index); onChanged(); } else { - requestBuilder_.remove(index); + actionBuilder_.remove(index); } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getRequestBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getActionBuilder( int index) { - return getRequestFieldBuilder().getBuilder(index); + return getActionFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( int index) { - if (requestBuilder_ == null) { - return request_.get(index); } else { - return requestBuilder_.getMessageOrBuilder(index); + if (actionBuilder_ == null) { + return action_.get(index); } else { + return actionBuilder_.getMessageOrBuilder(index); } } - public java.util.List - getRequestOrBuilderList() { - if (requestBuilder_ != null) { - return requestBuilder_.getMessageOrBuilderList(); + public java.util.List + getActionOrBuilderList() { + if (actionBuilder_ != null) { + return actionBuilder_.getMessageOrBuilderList(); } else { - return java.util.Collections.unmodifiableList(request_); + return java.util.Collections.unmodifiableList(action_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder() { - return getRequestFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addActionBuilder() { + return getActionFieldBuilder().addBuilder( + 
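/*
 * Editor's note: the repeated "action" field gets the full generated accessor
 * family (setAction, addAction, addAllAction, clearAction, removeAction). A
 * small sketch, assuming a pre-built java.util.List of NameBytesPair named
 * actions (illustrative):
 *
 *   builder.addAllAction(actions);
 *   // the list returned by getActionList() is unmodifiable;
 *   // mutate only through the builder methods above
 */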
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder( + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addActionBuilder( int index) { - return getRequestFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + return getActionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); } - public java.util.List - getRequestBuilderList() { - return getRequestFieldBuilder().getBuilderList(); + public java.util.List + getActionBuilderList() { + return getActionFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getRequestFieldBuilder() { - if (requestBuilder_ == null) { - requestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - request_, - ((bitField0_ & 0x00000001) == 0x00000001), + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getActionFieldBuilder() { + if (actionBuilder_ == null) { + actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + action_, + ((bitField0_ & 0x00000002) == 0x00000002), getParentForChildren(), isClean()); - request_ = null; + action_ = null; } - return requestBuilder_; + return actionBuilder_; } - // optional bool atomic = 2; + // optional bool atomic = 3; private boolean atomic_ ; public boolean hasAtomic() { - return ((bitField0_ & 0x00000002) == 0x00000002); + return ((bitField0_ & 0x00000004) == 0x00000004); } public boolean getAtomic() { return atomic_; } public Builder setAtomic(boolean value) { - bitField0_ |= 0x00000002; + bitField0_ |= 0x00000004; atomic_ = value; onChanged(); return this; } public Builder clearAtomic() { - bitField0_ = (bitField0_ & ~0x00000002); + bitField0_ = (bitField0_ & ~0x00000004); atomic_ = false; onChanged(); return this; @@ -20013,14 +19106,14 @@ public final class RegionClientProtos { public interface MultiResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // repeated .Parameter response = 1; - java.util.List - getResponseList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index); - int getResponseCount(); - java.util.List - getResponseOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + // repeated .ActionResult result = 1; + java.util.List + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); + 
int getResultCount(); + java.util.List + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index); } public static final class MultiResponse extends @@ -20043,45 +19136,45 @@ public final class RegionClientProtos { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; } - // repeated .Parameter response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private java.util.List response_; - public java.util.List getResponseList() { - return response_; + // repeated .ActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; } - public java.util.List - getResponseOrBuilderList() { - return response_; + public java.util.List + getResultOrBuilderList() { + return result_; } - public int getResponseCount() { - return response_.size(); + public int getResultCount() { + return result_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { - return response_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + return result_.get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index) { - return response_.get(index); + return result_.get(index); } private void initFields() { - response_ = java.util.Collections.emptyList(); + result_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - for (int i = 0; i < getResponseCount(); i++) { - if (!getResponse(i).isInitialized()) { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { memoizedIsInitialized = 0; return false; } @@ -20093,8 +19186,8 @@ public final class RegionClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - for (int i = 0; i < response_.size(); i++) { - output.writeMessage(1, response_.get(i)); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); } getUnknownFields().writeTo(output); } @@ -20105,9 +19198,9 @@ public final class RegionClientProtos { if (size != -1) return size; size = 0; - for (int i = 0; i < response_.size(); i++) { + for (int i = 0; i < result_.size(); i++) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, response_.get(i)); + .computeMessageSize(1, result_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -20126,14 +19219,14 @@ public final class RegionClientProtos { if (obj == 
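/*
 * Editor's note: MultiResponse now returns one ActionResult per submitted
 * action (repeated "result", field 1) instead of raw Parameters. A hedged
 * consumption sketch, assuming ActionResult exposes hasException()/getException()
 * to match the exception field builder shown earlier (handle(...) is a
 * placeholder, not a patch API):
 *
 *   for (ClientProtos.ActionResult r : response.getResultList()) {
 *     if (r.hasException()) {
 *       // NameBytesPair: exception class name plus serialized detail
 *       handle(r.getException().getName());
 *     }
 *   }
 */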
this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; boolean result = true; - result = result && getResponseList() - .equals(other.getResponseList()); + result = result && getResultList() + .equals(other.getResultList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -20143,49 +19236,49 @@ public final class RegionClientProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResponseCount() > 0) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponseList().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) 
.buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -20194,7 +19287,7 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -20205,12 +19298,12 @@ public final class RegionClientProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -20220,7 +19313,7 @@ public final class RegionClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -20233,18 +19326,18 @@ public final class RegionClientProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -20255,7 +19348,7 @@ public final class RegionClientProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResponseFieldBuilder(); + 
getResultFieldBuilder(); } } private static Builder create() { @@ -20264,11 +19357,11 @@ public final class RegionClientProtos { public Builder clear() { super.clear(); - if (responseBuilder_ == null) { - response_ = java.util.Collections.emptyList(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { - responseBuilder_.clear(); + resultBuilder_.clear(); } return this; } @@ -20279,24 +19372,24 @@ public final class RegionClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -20304,56 +19397,56 @@ public final class RegionClientProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); int from_bitField0_ = bitField0_; - if (responseBuilder_ == null) { + if (resultBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { - response_ = java.util.Collections.unmodifiableList(response_); + result_ = java.util.Collections.unmodifiableList(result_); bitField0_ = (bitField0_ & ~0x00000001); } - result.response_ = response_; + result.result_ = result_; } else { - result.response_ = responseBuilder_.build(); + result.result_ = resultBuilder_.build(); } onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()) return this; - if (responseBuilder_ == null) { - if (!other.response_.isEmpty()) { - if (response_.isEmpty()) { - response_ = other.response_; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; bitField0_ = (bitField0_ & ~0x00000001); } else { - ensureResponseIsMutable(); - response_.addAll(other.response_); + ensureResultIsMutable(); + result_.addAll(other.result_); } onChanged(); } } else { - if (!other.response_.isEmpty()) { - if (responseBuilder_.isEmpty()) { - responseBuilder_.dispose(); - responseBuilder_ = null; - response_ = other.response_; + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; bitField0_ = (bitField0_ & ~0x00000001); - responseBuilder_ = + resultBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getResponseFieldBuilder() : null; + getResultFieldBuilder() : null; } else { - responseBuilder_.addAllMessages(other.response_); + resultBuilder_.addAllMessages(other.result_); } } } @@ -20362,8 +19455,8 @@ public final class RegionClientProtos { } public final boolean isInitialized() { - for (int i = 0; i < getResponseCount(); i++) { - if (!getResponse(i).isInitialized()) { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { return false; } @@ -20395,9 +19488,9 @@ public final class RegionClientProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder(); input.readMessage(subBuilder, extensionRegistry); - addResponse(subBuilder.buildPartial()); + addResult(subBuilder.buildPartial()); break; } } @@ -20406,190 +19499,190 @@ public final class RegionClientProtos { private int bitField0_; - // repeated .Parameter response = 1; - private java.util.List response_ = + // repeated .ActionResult result = 1; + private java.util.List result_ = java.util.Collections.emptyList(); - private void ensureResponseIsMutable() { + private void ensureResultIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - response_ = new java.util.ArrayList(response_); + result_ = new java.util.ArrayList(result_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> responseBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; - public java.util.List getResponseList() { - if (responseBuilder_ == null) { - return java.util.Collections.unmodifiableList(response_); + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); } else { - return responseBuilder_.getMessageList(); + return resultBuilder_.getMessageList(); } } - public int getResponseCount() { - if (responseBuilder_ == null) { - return response_.size(); + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); } else { - return responseBuilder_.getCount(); + return resultBuilder_.getCount(); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { - if (responseBuilder_ == null) { - return response_.get(index); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { - return responseBuilder_.getMessage(index); + return resultBuilder_.getMessage(index); } } - public Builder setResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { if (value == 
null) { throw new NullPointerException(); } - ensureResponseIsMutable(); - response_.set(index, value); + ensureResultIsMutable(); + result_.set(index, value); onChanged(); } else { - responseBuilder_.setMessage(index, value); + resultBuilder_.setMessage(index, value); } return this; } - public Builder setResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.set(index, builderForValue.build()); + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); onChanged(); } else { - responseBuilder_.setMessage(index, builderForValue.build()); + resultBuilder_.setMessage(index, builderForValue.build()); } return this; } - public Builder addResponse(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureResponseIsMutable(); - response_.add(value); + ensureResultIsMutable(); + result_.add(value); onChanged(); } else { - responseBuilder_.addMessage(value); + resultBuilder_.addMessage(value); } return this; } - public Builder addResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureResponseIsMutable(); - response_.add(index, value); + ensureResultIsMutable(); + result_.add(index, value); onChanged(); } else { - responseBuilder_.addMessage(index, value); + resultBuilder_.addMessage(index, value); } return this; } - public Builder addResponse( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.add(builderForValue.build()); + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); onChanged(); } else { - responseBuilder_.addMessage(builderForValue.build()); + resultBuilder_.addMessage(builderForValue.build()); } return this; } - public Builder addResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.add(index, builderForValue.build()); + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); onChanged(); } else { - responseBuilder_.addMessage(index, builderForValue.build()); + resultBuilder_.addMessage(index, builderForValue.build()); } return this; } - public Builder addAllResponse( - java.lang.Iterable values) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - super.addAll(values, response_); + public 
Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); onChanged(); } else { - responseBuilder_.addAllMessages(values); + resultBuilder_.addAllMessages(values); } return this; } - public Builder clearResponse() { - if (responseBuilder_ == null) { - response_ = java.util.Collections.emptyList(); + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { - responseBuilder_.clear(); + resultBuilder_.clear(); } return this; } - public Builder removeResponse(int index) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.remove(index); + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); onChanged(); } else { - responseBuilder_.remove(index); + resultBuilder_.remove(index); } return this; } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getResponseBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( int index) { - return getResponseFieldBuilder().getBuilder(index); + return getResultFieldBuilder().getBuilder(index); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( int index) { - if (responseBuilder_ == null) { - return response_.get(index); } else { - return responseBuilder_.getMessageOrBuilder(index); + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); } } - public java.util.List - getResponseOrBuilderList() { - if (responseBuilder_ != null) { - return responseBuilder_.getMessageOrBuilderList(); + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); } else { - return java.util.Collections.unmodifiableList(response_); + return java.util.Collections.unmodifiableList(result_); } } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder() { - return getResponseFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( int index) { - return getResponseFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); } - public java.util.List - getResponseBuilderList() { - return getResponseFieldBuilder().getBuilderList(); + public java.util.List + getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getResponseFieldBuilder() { - if (responseBuilder_ == null) { - responseBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - response_, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder>( + result_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); - response_ = null; + result_ = null; } - return responseBuilder_; + return resultBuilder_; } // @@protoc_insertion_point(builder_scope:MultiResponse) @@ -20603,117 +19696,117 @@ public final class RegionClientProtos { // @@protoc_insertion_point(class_scope:MultiResponse) } - public static abstract class RegionClientService + public static abstract class ClientService implements com.google.protobuf.Service { - protected RegionClientService() {} + protected ClientService() {} public interface Interface { public abstract void get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); public abstract void mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); public abstract void scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); public abstract void lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); public abstract void unlockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); public abstract void bulkLoadHFile( com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); public abstract void execCoprocessor( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); public abstract void multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { - return new RegionClientService() { + return new ClientService() { @java.lang.Override public void get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { impl.get(controller, request, done); } @java.lang.Override public void mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { impl.mutate(controller, request, done); } @java.lang.Override public void scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { impl.scan(controller, request, done); } @java.lang.Override public void lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { impl.lockRow(controller, request, done); } @java.lang.Override public void unlockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { impl.unlockRow(controller, request, done); } @java.lang.Override public void bulkLoadHFile( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { impl.bulkLoadHFile(controller, request, done); } @java.lang.Override public void execCoprocessor( com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { impl.execCoprocessor(controller, request, done); } @java.lang.Override public void multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { impl.multi(controller, request, done); } @@ -20740,21 +19833,21 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request); + return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request); case 1: - return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request); + return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request); case 2: - return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request); + return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request); case 3: - return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request); + return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request); case 4: - return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request); + return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request); case 5: - return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request); + return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); case 6: - return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request); + return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request); case 7: - return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request); + return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -20770,21 +19863,21 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -20800,21 +19893,21 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -20825,48 +19918,48 @@ public final class RegionClientProtos { public abstract void get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + 
com.google.protobuf.RpcCallback done); public abstract void mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); public abstract void scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); public abstract void lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); public abstract void unlockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); public abstract void bulkLoadHFile( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); public abstract void execCoprocessor( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); public abstract void multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.getDescriptor().getServices().get(0); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0); } public final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { @@ -20886,43 +19979,43 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 1: - this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 2: - this.scan(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 3: - this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 4: - this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 5: - this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 6: - this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 7: - this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; default: @@ -20940,21 +20033,21 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 6: - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -20970,21 +20063,21 @@ public final class RegionClientProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -20995,7 +20088,7 @@ public final class RegionClientProtos { return new Stub(channel); } - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.RegionClientService implements Interface { + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface { private Stub(com.google.protobuf.RpcChannel channel) { this.channel = channel; } @@ -21008,122 +20101,122 @@ public final class RegionClientProtos { public void get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(), 
com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); } public void mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); } public void scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); } public void lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(3), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance())); } public void unlockRow( com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(4), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance())); } public void bulkLoadHFile( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(5), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); } public void execCoprocessor( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(6), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance())); } public void multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(7), controller, request, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); } } @@ -21133,44 +20226,44 @@ public final class RegionClientProtos { } public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) throws com.google.protobuf.ServiceException; } @@ -21181,99 +20274,99 @@ public final class RegionClientProtos { private final com.google.protobuf.BlockingRpcChannel channel; - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(1), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(3), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(4), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(5), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) throws 
com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(6), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(7), controller, request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); } } @@ -21284,11 +20377,6 @@ public final class RegionClientProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Column_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Attribute_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Attribute_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_Get_descriptor; private static @@ -21389,16 +20477,6 @@ public final class RegionClientProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_BulkLoadHFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Parameter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Parameter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Property_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Property_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_Exec_descriptor; private static @@ -21414,6 +20492,11 @@ public final class RegionClientProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ExecCoprocessorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ActionResult_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ActionResult_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_MultiRequest_descriptor; private static @@ -21433,94 +20516,90 @@ public final class RegionClientProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\022RegionClient.proto\032\013hbase.proto\"+\n\006Col" + - "umn\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"(" + - 
"\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014" + - "\"\310\001\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007." + - "Column\022\035\n\tattribute\030\003 \003(\0132\n.Attribute\022\016\n" + - "\006lockId\030\004 \001(\004\022\032\n\006filter\030\005 \001(\0132\n.Paramete" + + "\n\014Client.proto\032\013hbase.proto\"+\n\006Column\022\016\n" + + "\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\320\001\n\003Get" + + "\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Column\022!" + + "\n\tattribute\030\003 \003(\0132\016.NameBytesPair\022\016\n\006loc" + + "kId\030\004 \001(\004\022\036\n\006filter\030\005 \001(\0132\016.NameBytesPai" + "r\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxV" + "ersions\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004t" + - "rue\"\"\n\006Result\022\030\n\005value\030\001 \003(\0132\t.KeyValue\"" + - "r\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSp", - "ecifier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020closestRow" + - "Before\030\003 \001(\010\022\025\n\rexistenceOnly\030\004 \001(\010\"6\n\013G" + - "etResponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\016\n\006e" + - "xists\030\002 \001(\010\"\355\003\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016" + - "\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\013com" + - "pareType\030\004 \002(\0162\026.Condition.CompareType\022)" + - "\n\ncomparator\030\005 \002(\0162\025.Condition.Comparato" + - "r\022\r\n\005value\030\006 \001(\014\"r\n\013CompareType\022\010\n\004LESS\020" + - "\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_E" + - "QUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020", - "\005\022\t\n\005NO_OP\020\006\"\324\001\n\nComparator\022\025\n\021BINARY_CO" + - "MPARATOR\020\000\022\034\n\030BINARY_PREFIX_COMPARATOR\020\001" + - "\022\026\n\022BIT_AND_COMPARATOR\020\002\022\025\n\021BIT_OR_COMPA" + - "RATOR\020\003\022\026\n\022BIT_XOR_COMPARATOR\020\004\022\023\n\017NULL_" + - "COMPARATOR\020\005\022\033\n\027REGEX_STRING_COMPARATOR\020" + - "\006\022\030\n\024SUBSTRING_COMPARATOR\020\007\"\374\003\n\006Mutate\022\013" + - "\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Mutate" + + "rue\"\037\n\006Result\022\025\n\rkeyValueBytes\030\001 \003(\014\"r\n\n" + + "GetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020closestRowBef", + "ore\030\003 \001(\010\022\025\n\rexistenceOnly\030\004 \001(\010\"6\n\013GetR" + + "esponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\016\n\006exis" + + "ts\030\002 \001(\010\"\200\002\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016\n\006f" + + "amily\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\013compar" + + "eType\030\004 \002(\0162\026.Condition.CompareType\022\"\n\nc" + + "omparator\030\005 \002(\0132\016.NameBytesPair\"r\n\013Compa" + + "reType\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005E" + + "QUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUA" + + "L\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006\"\306\004\n\006Mutate\022\013" + + "\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Mutate", ".MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mutat" + - 
"e.ColumnValue\022\035\n\tattribute\030\004 \003(\0132\n.Attri" + - "bute\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030\006 \001(\004\022", - "\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeRange\030\n" + - " \001(\0132\n.TimeRange\032\263\001\n\013ColumnValue\022\016\n\006fami" + - "ly\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\".Mutat" + - "e.ColumnValue.QualifierValue\022\021\n\ttimestam" + - "p\030\003 \001(\004\032E\n\016QualifierValue\022\021\n\tqualifier\030\001" + - " \002(\014\022\r\n\005value\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\"b" + - "\n\nMutateType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022" + - "\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\022\021\n\rDELETE_COLUMN\020\004\022" + - "\021\n\rDELETE_FAMILY\020\005\"i\n\rMutateRequest\022 \n\006r" + - "egion\030\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030", - "\002 \002(\0132\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Cond" + - "ition\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132" + - "\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\367\001\n\004Scan\022\027\n\006" + - "column\030\001 \003(\0132\007.Column\022\035\n\tattribute\030\002 \003(\013" + - "2\n.Attribute\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRo" + - "w\030\004 \001(\014\022\032\n\006filter\030\005 \001(\0132\n.Parameter\022\035\n\tt" + - "imeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersion" + - "s\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022\023\n" + - "\013rowsToCache\030\t \001(\r\022\021\n\tbatchSize\030\n \001(\r\"a\n" + - "\013ScanRequest\022\021\n\tscannerId\030\001 \001(\004\022\023\n\004scan\030", - "\002 \001(\0132\005.Scan\022\024\n\014numberOfRows\030\003 \001(\r\022\024\n\014cl" + - "oseScanner\030\004 \001(\010\"\\\n\014ScanResponse\022\027\n\006resu" + - "lt\030\001 \003(\0132\007.Result\022\021\n\tscannerId\030\002 \001(\004\022\023\n\013" + - "moreResults\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\"?\n\016LockRo" + - "wRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022\013\n\003row\030\002 \003(\014\".\n\017LockRowResponse\022\016\n\006lo" + - "ckId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"D\n\020UnlockRowRequ" + - "est\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\016\n" + - "\006lockId\030\002 \002(\004\"\023\n\021UnlockRowResponse\"\232\001\n\024B" + - "ulkLoadHFileRequest\022 \n\006region\030\001 \002(\0132\020.Re", - "gionSpecifier\0224\n\nfamilyPath\030\002 \003(\0132 .Bulk" + - "LoadHFileRequest.FamilyPath\032*\n\nFamilyPat" + - "h\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkL" + - "oadHFileResponse\022\016\n\006loaded\030\001 \002(\010\".\n\tPara" + - "meter\022\014\n\004type\030\001 \002(\t\022\023\n\013binaryValue\030\002 \001(\014" + - "\"\'\n\010Property\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(" + - "\t\"y\n\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002" + - " \002(\t\022\022\n\nmethodName\030\003 \002(\t\022\033\n\010property\030\004 \003" + - "(\0132\t.Property\022\035\n\tparameter\030\005 \003(\0132\n.Param" + - "eter\"O\n\026ExecCoprocessorRequest\022 \n\006region", - "\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002 \002(\0132\005" + - ".Exec\"H\n\027ExecCoprocessorResponse\022\022\n\nregi" + - "onName\030\001 \002(\014\022\031\n\005value\030\002 \002(\0132\n.Parameter\"" + - 
";\n\014MultiRequest\022\033\n\007request\030\001 \003(\0132\n.Param" + - "eter\022\016\n\006atomic\030\002 \001(\010\"-\n\rMultiResponse\022\034\n" + - "\010response\030\001 \003(\0132\n.Parameter2\227\003\n\023RegionCl" + - "ientService\022 \n\003get\022\013.GetRequest\032\014.GetRes" + - "ponse\022)\n\006mutate\022\016.MutateRequest\032\017.Mutate" + - "Response\022#\n\004scan\022\014.ScanRequest\032\r.ScanRes" + - "ponse\022,\n\007lockRow\022\017.LockRowRequest\032\020.Lock", - "RowResponse\0222\n\tunlockRow\022\021.UnlockRowRequ" + - "est\032\022.UnlockRowResponse\022>\n\rbulkLoadHFile" + - "\022\025.BulkLoadHFileRequest\032\026.BulkLoadHFileR" + - "esponse\022D\n\017execCoprocessor\022\027.ExecCoproce" + - "ssorRequest\032\030.ExecCoprocessorResponse\022&\n" + - "\005multi\022\r.MultiRequest\032\016.MultiResponseBH\n" + - "*org.apache.hadoop.hbase.protobuf.genera" + - "tedB\022RegionClientProtosH\001\210\001\001\240\001\001" + "e.ColumnValue\022!\n\tattribute\030\004 \003(\0132\016.NameB" + + "ytesPair\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030\006 " + + "\001(\004\022\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeRan" + + "ge\030\n \001(\0132\n.TimeRange\032\310\001\n\013ColumnValue\022\016\n\006" + + "family\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\".M" + + "utate.ColumnValue.QualifierValue\032m\n\016Qual" + + "ifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002" + + " \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022&\n\ndeleteType\030\004 " + + "\001(\0162\022.Mutate.DeleteType\"<\n\nMutateType\022\n\n", + "\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DEL" + + "ETE\020\003\"U\n\nDeleteType\022\026\n\022DELETE_ONE_VERSIO" + + "N\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDEL" + + "ETE_FAMILY\020\002\"i\n\rMutateRequest\022 \n\006region\030" + + "\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030\002 \002(\0132" + + "\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Condition\"" + + "<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132\007.Resu" + + "lt\022\021\n\tprocessed\030\002 \001(\010\"\352\001\n\004Scan\022\027\n\006column" + + "\030\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\0132\016.Nam" + + "eBytesPair\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRow\030", + "\004 \001(\014\022\036\n\006filter\030\005 \001(\0132\016.NameBytesPair\022\035\n" + + "\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersi" + + "ons\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022" + + "\021\n\tbatchSize\030\t \001(\r\"\203\001\n\013ScanRequest\022 \n\006re" + + "gion\030\001 \001(\0132\020.RegionSpecifier\022\023\n\004scan\030\002 \001" + + "(\0132\005.Scan\022\021\n\tscannerId\030\003 \001(\004\022\024\n\014numberOf" + + "Rows\030\004 \001(\r\022\024\n\014closeScanner\030\005 \001(\010\"\\\n\014Scan" + + "Response\022\027\n\006result\030\001 \003(\0132\007.Result\022\021\n\tsca" + + "nnerId\030\002 \001(\004\022\023\n\013moreResults\030\003 \001(\010\022\013\n\003ttl" + + "\030\004 \001(\r\"?\n\016LockRowRequest\022 \n\006region\030\001 \002(\013", + "2\020.RegionSpecifier\022\013\n\003row\030\002 \003(\014\".\n\017LockR" + + "owResponse\022\016\n\006lockId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"" + + "D\n\020UnlockRowRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\016\n\006lockId\030\002 \002(\004\"\023\n\021UnlockR" + + 
"owResponse\"\232\001\n\024BulkLoadHFileRequest\022 \n\006r" + + "egion\030\001 \002(\0132\020.RegionSpecifier\0224\n\nfamilyP" + + "ath\030\002 \003(\0132 .BulkLoadHFileRequest.FamilyP" + + "ath\032*\n\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014\n\004pat" + + "h\030\002 \002(\t\"\'\n\025BulkLoadHFileResponse\022\016\n\006load" + + "ed\030\001 \002(\010\"\203\001\n\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protoc", + "olName\030\002 \002(\t\022\022\n\nmethodName\030\003 \002(\t\022!\n\010prop" + + "erty\030\004 \003(\0132\017.NameStringPair\022!\n\tparameter" + + "\030\005 \003(\0132\016.NameBytesPair\"O\n\026ExecCoprocesso" + + "rRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + + "er\022\023\n\004call\030\002 \002(\0132\005.Exec\"8\n\027ExecCoprocess" + + "orResponse\022\035\n\005value\030\001 \002(\0132\016.NameBytesPai" + + "r\"P\n\014ActionResult\022\035\n\005value\030\001 \001(\0132\016.NameB" + + "ytesPair\022!\n\texception\030\002 \001(\0132\016.NameBytesP" + + "air\"`\n\014MultiRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\036\n\006action\030\002 \003(\0132\016.NameByte", + "sPair\022\016\n\006atomic\030\003 \001(\010\".\n\rMultiResponse\022\035" + + "\n\006result\030\001 \003(\0132\r.ActionResult2\221\003\n\rClient" + + "Service\022 \n\003get\022\013.GetRequest\032\014.GetRespons" + + "e\022)\n\006mutate\022\016.MutateRequest\032\017.MutateResp" + + "onse\022#\n\004scan\022\014.ScanRequest\032\r.ScanRespons" + + "e\022,\n\007lockRow\022\017.LockRowRequest\032\020.LockRowR" + + "esponse\0222\n\tunlockRow\022\021.UnlockRowRequest\032" + + "\022.UnlockRowResponse\022>\n\rbulkLoadHFile\022\025.B" + + "ulkLoadHFileRequest\032\026.BulkLoadHFileRespo" + + "nse\022D\n\017execCoprocessor\022\027.ExecCoprocessor", + "Request\032\030.ExecCoprocessorResponse\022&\n\005mul" + + "ti\022\r.MultiRequest\032\016.MultiResponseBB\n*org" + + ".apache.hadoop.hbase.protobuf.generatedB" + + "\014ClientProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -21533,232 +20612,216 @@ public final class RegionClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Column_descriptor, new java.lang.String[] { "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder.class); - internal_static_Attribute_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_Attribute_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Attribute_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); internal_static_Get_descriptor = - getDescriptor().getMessageTypes().get(2); + getDescriptor().getMessageTypes().get(1); internal_static_Get_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Get_descriptor, new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", 
"CacheBlocks", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); internal_static_Result_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(2); internal_static_Result_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Result_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder.class); + new java.lang.String[] { "KeyValueBytes", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); internal_static_GetRequest_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(3); internal_static_GetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetRequest_descriptor, new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); internal_static_GetResponse_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(4); internal_static_GetResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetResponse_descriptor, new java.lang.String[] { "Result", "Exists", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); internal_static_Condition_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(5); internal_static_Condition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Condition_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder.class); + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); internal_static_Mutate_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(6); internal_static_Mutate_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Mutate_descriptor, new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder.class); internal_static_Mutate_ColumnValue_descriptor = internal_static_Mutate_descriptor.getNestedTypes().get(0); internal_static_Mutate_ColumnValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Mutate_ColumnValue_descriptor, - new java.lang.String[] { "Family", "QualifierValue", "Timestamp", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder.class); + new java.lang.String[] { "Family", "QualifierValue", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder.class); internal_static_Mutate_ColumnValue_QualifierValue_descriptor = internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Mutate_ColumnValue_QualifierValue_descriptor, - new java.lang.String[] { "Qualifier", "Value", "Timestamp", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); + new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); internal_static_MutateRequest_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(7); internal_static_MutateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateRequest_descriptor, new java.lang.String[] { "Region", "Mutate", "Condition", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); internal_static_MutateResponse_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(8); internal_static_MutateResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateResponse_descriptor, new java.lang.String[] { "Result", "Processed", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); internal_static_Scan_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(9); internal_static_Scan_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Scan_descriptor, - new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "RowsToCache", "BatchSize", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder.class); + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); internal_static_ScanRequest_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(10); internal_static_ScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanRequest_descriptor, - new java.lang.String[] { "ScannerId", "Scan", "NumberOfRows", "CloseScanner", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.Builder.class); + new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); internal_static_ScanResponse_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(11); internal_static_ScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanResponse_descriptor, new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); internal_static_LockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(12); internal_static_LockRowRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LockRowRequest_descriptor, new java.lang.String[] { "Region", "Row", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.Builder.class); internal_static_LockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(13); internal_static_LockRowResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LockRowResponse_descriptor, new java.lang.String[] { "LockId", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.Builder.class); internal_static_UnlockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(14); internal_static_UnlockRowRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnlockRowRequest_descriptor, new java.lang.String[] { "Region", "LockId", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.Builder.class); internal_static_UnlockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(15); internal_static_UnlockRowResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnlockRowResponse_descriptor, new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.Builder.class); internal_static_BulkLoadHFileRequest_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(16); internal_static_BulkLoadHFileRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileRequest_descriptor, new java.lang.String[] { "Region", "FamilyPath", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, new java.lang.String[] { "Family", "Path", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); internal_static_BulkLoadHFileResponse_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(17); internal_static_BulkLoadHFileResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileResponse_descriptor, new java.lang.String[] { "Loaded", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.Builder.class); - 
internal_static_Parameter_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_Parameter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Parameter_descriptor, - new java.lang.String[] { "Type", "BinaryValue", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder.class); - internal_static_Property_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_Property_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Property_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); internal_static_Exec_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(18); internal_static_Exec_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Exec_descriptor, new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder.class); internal_static_ExecCoprocessorRequest_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(19); internal_static_ExecCoprocessorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ExecCoprocessorRequest_descriptor, new java.lang.String[] { "Region", "Call", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.Builder.class); internal_static_ExecCoprocessorResponse_descriptor = - getDescriptor().getMessageTypes().get(23); + getDescriptor().getMessageTypes().get(20); internal_static_ExecCoprocessorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ExecCoprocessorResponse_descriptor, - new java.lang.String[] { "RegionName", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.Builder.class); + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.Builder.class); + internal_static_ActionResult_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_ActionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ActionResult_descriptor, + new 
java.lang.String[] { "Value", "Exception", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); internal_static_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(24); + getDescriptor().getMessageTypes().get(22); internal_static_MultiRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRequest_descriptor, - new java.lang.String[] { "Request", "Atomic", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.Builder.class); + new java.lang.String[] { "Region", "Action", "Atomic", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); internal_static_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(25); + getDescriptor().getMessageTypes().get(23); internal_static_MultiResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiResponse_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.Builder.class); + new java.lang.String[] { "Result", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); return null; } }; diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 4026da04f99..efcf74d2f1b 100644 --- a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -3080,6 +3080,1018 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:ServerName) } + public interface NameStringPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // required string value = 2; + boolean hasValue(); + String getValue(); + } + public static final class NameStringPair extends + com.google.protobuf.GeneratedMessage + implements NameStringPairOrBuilder { + // Use NameStringPair.newBuilder() to construct. 
+ private NameStringPair(Builder builder) { + super(builder); + } + private NameStringPair(boolean noInit) {} + + private static final NameStringPair defaultInstance; + public static NameStringPair getDefaultInstance() { + return defaultInstance; + } + + public NameStringPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private java.lang.Object value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + value_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + name_ = ""; + value_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getValueBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, 
getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getValueBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void 
setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // required string value = 2; + private java.lang.Object value_ = ""; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + value_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setValue(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + void setValue(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:NameStringPair) + } + + static { + defaultInstance = new NameStringPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameStringPair) + } + + public interface NameBytesPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class NameBytesPair extends + com.google.protobuf.GeneratedMessage + implements NameBytesPairOrBuilder { + // Use NameBytesPair.newBuilder() to construct. + private NameBytesPair(Builder builder) { + super(builder); + } + private NameBytesPair(boolean noInit) {} + + private static final NameBytesPair defaultInstance; + public static NameBytesPair getDefaultInstance() { + return defaultInstance; + } + + public NameBytesPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + 
private void initFields() { + name_ = ""; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:NameBytesPair) + } + + static { + defaultInstance = new NameBytesPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameBytesPair) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_RegionInfo_descriptor; private static @@ -3105,6 +4117,16 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ServerName_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameStringPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameStringPair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameBytesPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameBytesPair_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -3126,11 +4148,14 @@ public final class HBaseProtos { "(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004", "\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030\006 \001" + "(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n\004po" + - "rt\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004*_\n\007KeyType\022\013" + - "\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELE" + - "TE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMU" + - "M\020\377\001B>\n*org.apache.hadoop.hbase.protobuf" + - ".generatedB\013HBaseProtosH\001\240\001\001" + "rt\030\002 
\001(\r\022\021\n\tstartCode\030\003 \001(\004\"-\n\016NameStrin" + + "gPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNa" + + "meBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014" + + "*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DEL" + + "ETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAMIL" + + "Y\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoop.h" + + "base.protobuf.generatedB\013HBaseProtosH\001\240\001" + + "\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -3177,6 +4202,22 @@ public final class HBaseProtos { new java.lang.String[] { "HostName", "Port", "StartCode", }, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + internal_static_NameStringPair_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_NameStringPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameStringPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); + internal_static_NameBytesPair_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_NameBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameBytesPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); return null; } }; diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 9b34e61de45..0d22c0e0f47 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -40,11 +40,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -119,7 +117,7 @@ import org.apache.hadoop.hbase.ipc.Invocation; import org.apache.hadoop.hbase.ipc.ProtocolSignature; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress; import org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler; import org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler; @@ -139,7 +137,6 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CompressionTest; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSTableDescriptors; -import 
org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.InfoServer; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Sleeper; @@ -170,36 +167,22 @@ import com.google.common.collect.Lists; * the HMaster. There are many HRegionServers in a single HBase deployment. */ @InterfaceAudience.Private -public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, - Runnable, RegionServerServices { +public class HRegionServer extends RegionServer + implements HRegionInterface, HBaseRPCErrorHandler { public static final Log LOG = LogFactory.getLog(HRegionServer.class); - // Set when a report to the master comes back with a message asking us to - // shutdown. Also set by call to stop when debugging or running unit tests - // of HRegionServer in isolation. - protected volatile boolean stopped = false; - // A state before we go into stopped state. At this stage we're closing user // space regions. private boolean stopping = false; - // Go down hard. Used if file system becomes unavailable and also in - // debugging and unit tests. - protected volatile boolean abortRequested; - private volatile boolean killed = false; - // If false, the file system has become unavailable - protected volatile boolean fsOk; - protected final Configuration conf; protected final AtomicBoolean haveRootRegion = new AtomicBoolean(false); - private HFileSystem fs; private boolean useHBaseChecksum; // verify hbase checksums? private Path rootDir; - private final Random rand = new Random(); //RegionName vs current action in progress //true - if open region action in progress @@ -207,13 +190,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, private final ConcurrentSkipListMap regionsInTransitionInRS = new ConcurrentSkipListMap(Bytes.BYTES_COMPARATOR); - /** - * Map of regions currently being served by this region server. Key is the - * encoded region name. All access should be synchronized. - */ - protected final Map onlineRegions = - new ConcurrentHashMap(); - protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); final int numRetries; @@ -222,8 +198,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, protected final int numRegionsToReport; - private final long maxScannerResultSize; - // Remote HMaster private HMasterRegionInterface hbaseMaster; @@ -233,13 +207,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, private final InetSocketAddress isa; - // Leases - private Leases leases; - - // Request counter. - // Do we need this? Can't we just sum region counters? St.Ack 20110412 - private AtomicInteger requestCount = new AtomicInteger(); - // Info server. Default access so can be used by unit tests. REGIONSERVER // is name of the webapp and the attribute name used stuffing this instance // into web context. @@ -263,9 +230,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // Compactions public CompactSplitThread compactSplitThread; - // Cache flushing - MemStoreFlusher cacheFlusher; - /* * Check for compactions requests. 
*/ @@ -279,9 +243,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // flag set after we're done setting up server threads (used for testing) protected volatile boolean isOnline; - final Map scanners = - new ConcurrentHashMap(); - // zookeeper connection and watcher private ZooKeeperWatcher zooKeeper; @@ -405,8 +366,10 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, if (initialIsa.getAddress() == null) { throw new IllegalArgumentException("Failed resolve of " + initialIsa); } + this.rpcServer = HBaseRPC.getServer(this, - new Class[]{HRegionInterface.class, HBaseRPCErrorHandler.class, + new Class[]{HRegionInterface.class, ClientProtocol.class, + HBaseRPCErrorHandler.class, OnlineRegions.class}, initialIsa.getHostName(), // BindAddress is IP we got for this server. initialIsa.getPort(), @@ -445,12 +408,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - private static final int NORMAL_QOS = 0; - private static final int QOS_THRESHOLD = 10; // the line between low and high qos - private static final int HIGH_QOS = 100; - @Retention(RetentionPolicy.RUNTIME) - private @interface QosPriority { + protected @interface QosPriority { int priority() default 0; } @@ -648,7 +607,7 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // Create the thread for the ThriftServer. if (conf.getBoolean("hbase.regionserver.export.thrift", false)) { - thriftServer = new HRegionThriftServer(this, conf); + thriftServer = new HRegionThriftServer((RegionServer)this, conf); thriftServer.start(); LOG.info("Started Thrift API from Region Server."); } @@ -1080,110 +1039,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return r != null ? createRegionLoad(r) : null; } - /* - * Cleanup after Throwable caught invoking method. Converts t to - * IOE if it isn't already. - * - * @param t Throwable - * - * @return Throwable converted to an IOE; methods can only let out IOEs. - */ - private Throwable cleanup(final Throwable t) { - return cleanup(t, null); - } - - /* - * Cleanup after Throwable caught invoking method. Converts t to - * IOE if it isn't already. - * - * @param t Throwable - * - * @param msg Message to log in error. Can be null. - * - * @return Throwable converted to an IOE; methods can only let out IOEs. - */ - private Throwable cleanup(final Throwable t, final String msg) { - // Don't log as error if NSRE; NSRE is 'normal' operation. - if (t instanceof NotServingRegionException) { - LOG.debug("NotServingRegionException; " + t.getMessage()); - return t; - } - if (msg == null) { - LOG.error("", RemoteExceptionHandler.checkThrowable(t)); - } else { - LOG.error(msg, RemoteExceptionHandler.checkThrowable(t)); - } - if (!checkOOME(t)) { - checkFileSystem(); - } - return t; - } - - /* - * @param t - * - * @return Make t an IOE if it isn't already. - */ - private IOException convertThrowableToIOE(final Throwable t) { - return convertThrowableToIOE(t, null); - } - - /* - * @param t - * - * @param msg Message to put in new IOE if passed t is not an IOE - * - * @return Make t an IOE if it isn't already. - */ - private IOException convertThrowableToIOE(final Throwable t, final String msg) { - return (t instanceof IOException ? (IOException) t : msg == null - || msg.length() == 0 ? new IOException(t) : new IOException(msg, t)); - } - - /* - * Check if an OOME and, if so, abort immediately to avoid creating more objects. 
- * - * @param e - * - * @return True if we OOME'd and are aborting. - */ - public boolean checkOOME(final Throwable e) { - boolean stop = false; - try { - if (e instanceof OutOfMemoryError - || (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) - || (e.getMessage() != null && e.getMessage().contains( - "java.lang.OutOfMemoryError"))) { - stop = true; - LOG.fatal( - "Run out of memory; HRegionServer will abort itself immediately", e); - } - } finally { - if (stop) { - Runtime.getRuntime().halt(1); - } - } - return stop; - } - - /** - * Checks to see if the file system is still accessible. If not, sets - * abortRequested and stopRequested - * - * @return false if file system is not available - */ - public boolean checkFileSystem() { - if (this.fsOk && this.fs != null) { - try { - FSUtils.checkFileSystemAvailable(this.fs); - } catch (IOException e) { - abort("File System not available", e); - this.fsOk = false; - } - } - return this.fsOk; - } - /* * Inner class that runs on a long period checking if regions need compaction. */ @@ -2334,15 +2189,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - protected long addScanner(RegionScanner s) throws LeaseStillHeldException { - long scannerId = -1L; - scannerId = rand.nextLong(); - String scannerName = String.valueOf(scannerId); - scanners.put(scannerName, s); - this.leases.createLease(scannerName, new ScannerListener(scannerName)); - return scannerId; - } - public Result next(final long scannerId) throws IOException { Result[] res = next(scannerId, 1); if (res == null || res.length == 0) { @@ -2468,42 +2314,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - /** - * Instantiated as a scanner lease. If the lease times out, the scanner is - * closed - */ - private class ScannerListener implements LeaseListener { - private final String scannerName; - - ScannerListener(final String n) { - this.scannerName = n; - } - - public void leaseExpired() { - RegionScanner s = scanners.remove(this.scannerName); - if (s != null) { - LOG.info("Scanner " + this.scannerName + " lease expired on region " - + s.getRegionInfo().getRegionNameAsString()); - try { - HRegion region = getRegion(s.getRegionInfo().getRegionName()); - if (region != null && region.getCoprocessorHost() != null) { - region.getCoprocessorHost().preScannerClose(s); - } - - s.close(); - if (region != null && region.getCoprocessorHost() != null) { - region.getCoprocessorHost().postScannerClose(s); - } - } catch (IOException e) { - LOG.error("Closing scanner for " - + s.getRegionInfo().getRegionNameAsString(), e); - } - } else { - LOG.info("Scanner " + this.scannerName + " lease expired"); - } - } - } - // // Methods that do the actual work for the remote API // @@ -2585,39 +2395,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - protected long addRowLock(Integer r, HRegion region) - throws LeaseStillHeldException { - long lockId = -1L; - lockId = rand.nextLong(); - String lockName = String.valueOf(lockId); - rowlocks.put(lockName, r); - this.leases.createLease(lockName, new RowLockListener(lockName, region)); - return lockId; - } - - /** - * Method to get the Integer lock identifier used internally from the long - * lock identifier used by the client. - * - * @param lockId - * long row lock identifier from client - * @return intId Integer row lock used internally in HRegion - * @throws IOException - * Thrown if this is not a valid client lock id. 
- */ - Integer getLockFromId(long lockId) throws IOException { - if (lockId == -1L) { - return null; - } - String lockName = String.valueOf(lockId); - Integer rl = rowlocks.get(lockName); - if (rl == null) { - throw new UnknownRowLockException("Invalid row lock"); - } - this.leases.renewLease(lockName); - return rl; - } - @Override @QosPriority(priority=HIGH_QOS) public void unlockRow(byte[] regionName, long lockId) throws IOException { @@ -2663,30 +2440,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return region.bulkLoadHFiles(familyPaths); } - Map rowlocks = new ConcurrentHashMap(); - - /** - * Instantiated as a row lock lease. If the lease times out, the row lock is - * released - */ - private class RowLockListener implements LeaseListener { - private final String lockName; - private final HRegion region; - - RowLockListener(final String lockName, final HRegion region) { - this.lockName = lockName; - this.region = region; - } - - public void leaseExpired() { - LOG.info("Row Lock " + this.lockName + " lease expired"); - Integer r = rowlocks.remove(this.lockName); - if (r != null) { - region.releaseRowLock(r); - } - } - } - // Region open/close direct RPCs @Override @@ -3057,15 +2810,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return r; } - /** - * @param regionName - * @return HRegion for the passed binary regionName or null if - * named region is not member of the online regions. - */ - public HRegion getOnlineRegion(final byte[] regionName) { - return getFromOnlineRegions(HRegionInfo.encodeRegionName(regionName)); - } - /** @return the request count */ public AtomicInteger getRequestCount() { return this.requestCount; @@ -3083,25 +2827,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return this.cacheFlusher; } - /** - * Protected utility method for safely obtaining an HRegion handle. - * - * @param regionName - * Name of online {@link HRegion} to return - * @return {@link HRegion} for regionName - * @throws NotServingRegionException - */ - protected HRegion getRegion(final byte[] regionName) - throws NotServingRegionException { - HRegion region = null; - region = getOnlineRegion(regionName); - if (region == null) { - throw new NotServingRegionException("Region is not online: " + - Bytes.toStringBinary(regionName)); - } - return region; - } - /** * Get the top N most loaded regions this server is serving so we can tell the * master which regions it can reallocate if we're overloaded. TODO: actually @@ -3123,21 +2848,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return regions.toArray(new HRegionInfo[regions.size()]); } - /** - * Called to verify that this server is up and running. - * - * @throws IOException - */ - protected void checkOpen() throws IOException { - if (this.stopped || this.abortRequested) { - throw new RegionServerStoppedException("Server " + getServerName() + - " not running" + (this.abortRequested ? 
", aborting" : "")); - } - if (!fsOk) { - throw new RegionServerStoppedException("File system not available"); - } - } - @Override @QosPriority(priority=HIGH_QOS) public ProtocolSignature getProtocolSignature( @@ -3145,6 +2855,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, throws IOException { if (protocol.equals(HRegionInterface.class.getName())) { return new ProtocolSignature(HRegionInterface.VERSION, null); + } else if (protocol.equals(ClientProtocol.class.getName())) { + return new ProtocolSignature(ClientProtocol.VERSION, null); } throw new IOException("Unknown protocol: " + protocol); } @@ -3155,6 +2867,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, throws IOException { if (protocol.equals(HRegionInterface.class.getName())) { return HRegionInterface.VERSION; + } else if (protocol.equals(ClientProtocol.class.getName())) { + return ClientProtocol.VERSION; } throw new IOException("Unknown protocol: " + protocol); } @@ -3687,4 +3401,4 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, mxBeanInfo); LOG.info("Registered RegionServer MXBean"); } -} \ No newline at end of file +} diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java index 703e73d2e7f..759633d4f49 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java @@ -36,11 +36,17 @@ import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.thrift.ThriftServerRunner; import org.apache.hadoop.hbase.thrift.ThriftUtilities; import org.apache.hadoop.hbase.thrift.generated.IOError; import org.apache.hadoop.hbase.thrift.generated.TRowResult; +import com.google.protobuf.ServiceException; + /** * HRegionThriftServer - this class starts up a Thrift server in the same * JVM where the RegionServer is running. 
It inherits most of the @@ -56,14 +62,14 @@ public class HRegionThriftServer extends Thread { public static final Log LOG = LogFactory.getLog(HRegionThriftServer.class); - private final HRegionServer rs; + private final RegionServer rs; private final ThriftServerRunner serverRunner; /** * Create an instance of the glue object that connects the * RegionServer with the standard ThriftServer implementation */ - HRegionThriftServer(HRegionServer regionServer, Configuration conf) + HRegionThriftServer(RegionServer regionServer, Configuration conf) throws IOException { super("Region Thrift Server"); this.rs = regionServer; @@ -130,7 +136,10 @@ public class HRegionThriftServer extends Thread { if (columns == null) { Get get = new Get(row); get.setTimeRange(Long.MIN_VALUE, timestamp); - Result result = rs.get(regionName, get); + GetRequest request = + RequestConverter.buildGetRequest(regionName, get); + GetResponse response = rs.get(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); return ThriftUtilities.rowResultFromHBase(result); } Get get = new Get(row); @@ -143,7 +152,10 @@ public class HRegionThriftServer extends Thread { } } get.setTimeRange(Long.MIN_VALUE, timestamp); - Result result = rs.get(regionName, get); + GetRequest request = + RequestConverter.buildGetRequest(regionName, get); + GetResponse response = rs.get(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); return ThriftUtilities.rowResultFromHBase(result); } catch (NotServingRegionException e) { if (!redirect) { @@ -153,6 +165,10 @@ public class HRegionThriftServer extends Thread { LOG.debug("ThriftServer redirecting getRowWithColumnsTs"); return super.getRowWithColumnsTs(tableName, rowb, columns, timestamp, attributes); + } catch (ServiceException se) { + IOException e = ProtobufUtil.getRemoteException(se); + LOG.warn(e.getMessage(), e); + throw new IOError(e.getMessage()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java index b520f3ff040..0b7ed0e3861 100644 --- a/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java @@ -55,7 +55,7 @@ import java.io.IOException; @InterfaceAudience.Private public class Leases extends HasThread { private static final Log LOG = LogFactory.getLog(Leases.class.getName()); - private final int leasePeriod; + protected final int leasePeriod; private final int leaseCheckFrequency; private volatile DelayQueue leaseQueue = new DelayQueue(); protected final Map leases = new HashMap(); diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java new file mode 100644 index 00000000000..9487a1c4e3d --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java @@ -0,0 +1,1164 @@ +/** + * Copyright 2010 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.regionserver; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.RemoteExceptionHandler; +import org.apache.hadoop.hbase.UnknownRowLockException; +import org.apache.hadoop.hbase.UnknownScannerException; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.client.coprocessor.ExecResult; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; +import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.regionserver.HRegionServer.QosPriority; +import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.util.Pair; + +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + +/** + * RegionServer makes a set of HRegions available to clients. It checks in with + * the HMaster. There are many RegionServers in a single HBase deployment. + * + * This will be a replacement for the HRegionServer. It implements the + * protobuf-based protocols. All the HRegionInterface implementations stay in + * HRegionServer for backward compatibility. This also makes it easier to + * rip out HRegionInterface later on. + */ +@InterfaceAudience.Private +public abstract class RegionServer implements + ClientProtocol, Runnable, RegionServerServices { + + private static final Log LOG = LogFactory.getLog(RegionServer.class); + + private final Random rand = new Random(); + + protected long maxScannerResultSize; + + // Cache flushing + protected MemStoreFlusher cacheFlusher; + + final Map<String, RegionScanner> scanners = + new ConcurrentHashMap<String, RegionScanner>(); + + /** + * Map of regions currently being served by this region server. Key is the + * encoded region name. All access should be synchronized. + */ + protected final Map<String, HRegion> onlineRegions = + new ConcurrentHashMap<String, HRegion>(); + + // Leases + protected Leases leases; + + // Request counter. + // Do we need this? Can't we just sum region counters? St.Ack 20110412 + protected AtomicInteger requestCount = new AtomicInteger(); + + // If false, the file system has become unavailable + protected volatile boolean fsOk; + protected HFileSystem fs; + + protected static final int NORMAL_QOS = 0; + protected static final int QOS_THRESHOLD = 10; // the line between low and high qos + protected static final int HIGH_QOS = 100; + + // Set when a report to the master comes back with a message asking us to + // shutdown. Also set by call to stop when debugging or running unit tests + // of HRegionServer in isolation. + protected volatile boolean stopped = false; + + // Go down hard. Used if file system becomes unavailable and also in + // debugging and unit tests. + protected volatile boolean abortRequested; + + Map<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>(); + + /** + * Instantiated as a row lock lease. 
If the lease times out, the row lock is + released. + */ + private class RowLockListener implements LeaseListener { + private final String lockName; + private final HRegion region; + + RowLockListener(final String lockName, final HRegion region) { + this.lockName = lockName; + this.region = region; + } + + public void leaseExpired() { + LOG.info("Row Lock " + this.lockName + " lease expired"); + Integer r = rowlocks.remove(this.lockName); + if (r != null) { + region.releaseRowLock(r); + } + } + } + + /** + * Instantiated as a scanner lease. If the lease times out, the scanner is + * closed. + */ + private class ScannerListener implements LeaseListener { + private final String scannerName; + + ScannerListener(final String n) { + this.scannerName = n; + } + + public void leaseExpired() { + RegionScanner s = scanners.remove(this.scannerName); + if (s != null) { + LOG.info("Scanner " + this.scannerName + " lease expired on region " + + s.getRegionInfo().getRegionNameAsString()); + try { + HRegion region = getRegion(s.getRegionInfo().getRegionName()); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().preScannerClose(s); + } + + s.close(); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerClose(s); + } + } catch (IOException e) { + LOG.error("Closing scanner for " + + s.getRegionInfo().getRegionNameAsString(), e); + } + } else { + LOG.info("Scanner " + this.scannerName + " lease expired"); + } + } + } + + /** + * Method to get the Integer lock identifier used internally from the long + * lock identifier used by the client. + * + * @param lockId + * long row lock identifier from client + * @return intId Integer row lock used internally in HRegion + * @throws IOException + * Thrown if this is not a valid client lock id. + */ + Integer getLockFromId(long lockId) throws IOException { + if (lockId == -1L) { + return null; + } + String lockName = String.valueOf(lockId); + Integer rl = rowlocks.get(lockName); + if (rl == null) { + throw new UnknownRowLockException("Invalid row lock"); + } + this.leases.renewLease(lockName); + return rl; + } + + /** + * Called to verify that this server is up and running. + * + * @throws IOException + */ + protected void checkOpen() throws IOException { + if (this.stopped || this.abortRequested) { + throw new RegionServerStoppedException("Server " + getServerName() + + " not running" + (this.abortRequested ? ", aborting" : "")); + } + if (!fsOk) { + throw new RegionServerStoppedException("File system not available"); + } + } + + /** + * @param regionName + * @return HRegion for the passed binary regionName or null if the + * named region is not a member of the online regions. + */ + public HRegion getOnlineRegion(final byte[] regionName) { + String encodedRegionName = HRegionInfo.encodeRegionName(regionName); + return this.onlineRegions.get(encodedRegionName); + } + + /** + * Protected utility method for safely obtaining an HRegion handle. + * + * @param regionName + * Name of online {@link HRegion} to return + * @return {@link HRegion} for regionName + * @throws NotServingRegionException + */ + protected HRegion getRegion(final byte[] regionName) + throws NotServingRegionException { + HRegion region = getOnlineRegion(regionName); + if (region == null) { + throw new NotServingRegionException("Region is not online: " + + Bytes.toStringBinary(regionName)); + } + return region; + } + + /* + * Cleanup after Throwable caught invoking method. 
Converts t to + IOE if it isn't already. + * + * @param t Throwable + * + * @return Throwable converted to an IOE; methods can only let out IOEs. + */ + protected Throwable cleanup(final Throwable t) { + return cleanup(t, null); + } + + /* + * Cleanup after Throwable caught invoking method. Converts t to + IOE if it isn't already. + * + * @param t Throwable + * + * @param msg Message to log in error. Can be null. + * + * @return Throwable converted to an IOE; methods can only let out IOEs. + */ + protected Throwable cleanup(final Throwable t, final String msg) { + // Don't log as error if NSRE; NSRE is 'normal' operation. + if (t instanceof NotServingRegionException) { + LOG.debug("NotServingRegionException; " + t.getMessage()); + return t; + } + if (msg == null) { + LOG.error("", RemoteExceptionHandler.checkThrowable(t)); + } else { + LOG.error(msg, RemoteExceptionHandler.checkThrowable(t)); + } + if (!checkOOME(t)) { + checkFileSystem(); + } + return t; + } + + /* + * @param t + * + * @return Make t an IOE if it isn't already. + */ + protected IOException convertThrowableToIOE(final Throwable t) { + return convertThrowableToIOE(t, null); + } + + /* + * @param t + * + * @param msg Message to put in new IOE if passed t is not an IOE + * + * @return Make t an IOE if it isn't already. + */ + protected IOException convertThrowableToIOE(final Throwable t, final String msg) { + return (t instanceof IOException ? (IOException) t : msg == null + || msg.length() == 0 ? new IOException(t) : new IOException(msg, t)); + } + + /* + * Check if an OOME and, if so, abort immediately to avoid creating more objects. + * + * @param e + * + * @return True if we OOME'd and are aborting. + */ + public boolean checkOOME(final Throwable e) { + boolean stop = false; + try { + if (e instanceof OutOfMemoryError + || (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) + || (e.getMessage() != null && e.getMessage().contains( + "java.lang.OutOfMemoryError"))) { + stop = true; + LOG.fatal( + "Run out of memory; HRegionServer will abort itself immediately", e); + } + } finally { + if (stop) { + Runtime.getRuntime().halt(1); + } + } + return stop; + } + + /** + * Checks to see if the file system is still accessible. If not, sets + * abortRequested and stopRequested + * + * @return false if file system is not available + */ + public boolean checkFileSystem() { + if (this.fsOk && this.fs != null) { + try { + FSUtils.checkFileSystemAvailable(this.fs); + } catch (IOException e) { + abort("File System not available", e); + this.fsOk = false; + } + } + return this.fsOk; + } + + protected long addRowLock(Integer r, HRegion region) + throws LeaseStillHeldException { + long lockId = nextLong(); + String lockName = String.valueOf(lockId); + rowlocks.put(lockName, r); + this.leases.createLease(lockName, new RowLockListener(lockName, region)); + return lockId; + } + + protected long addScanner(RegionScanner s) throws LeaseStillHeldException { + long scannerId = nextLong(); + String scannerName = String.valueOf(scannerId); + scanners.put(scannerName, s); + this.leases.createLease(scannerName, new ScannerListener(scannerName)); + return scannerId; + } + + /** + * Generate a random positive long number + * + * @return a random positive long number + */ + protected long nextLong() { + long n = rand.nextLong(); + if (n == 0 || n == Long.MIN_VALUE) { + // 0 is not a usable id, and Long.MIN_VALUE has no positive negation. + return nextLong(); + } + return n < 0 ? -n : n; + } + + // Start Client methods + + /** + * Get data from a table. 
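+ * The request can also ask for the closest row before the given row + * (closestRowBefore), or for a bare existence check (existenceOnly) + * instead of a full Result. + * + * <p>A minimal caller sketch, mirroring how HRegionThriftServer in this + * patch drives the call ({@code server} stands for any ClientProtocol + * endpoint, named {@code rs} there; the null RpcController is taken from + * that usage): + * <pre> + *   Get get = new Get(row); + *   GetRequest request = RequestConverter.buildGetRequest(regionName, get); + *   GetResponse response = server.get(null, request); + *   Result result = ProtobufUtil.toResult(response.getResult()); + * </pre>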
+ * + * @param controller the RPC controller + * @param request the get request + * @throws ServiceException + */ + @Override + public GetResponse get(final RpcController controller, + final GetRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + GetResponse.Builder builder = GetResponse.newBuilder(); + ClientProtos.Get get = request.getGet(); + Boolean existence = null; + Result r = null; + if (request.getClosestRowBefore()) { + if (get.getColumnCount() != 1) { + throw new DoNotRetryIOException( + "get ClosestRowBefore supports one and only one family now, not " + + get.getColumnCount() + " families"); + } + byte[] row = get.getRow().toByteArray(); + byte[] family = get.getColumn(0).getFamily().toByteArray(); + r = region.getClosestRowBefore(row, family); + } else { + Get clientGet = ProtobufUtil.toGet(get); + if (request.getExistenceOnly() && region.getCoprocessorHost() != null) { + existence = region.getCoprocessorHost().preExists(clientGet); + } + if (existence == null) { + Integer lock = getLockFromId(clientGet.getLockId()); + r = region.get(clientGet, lock); + if (request.getExistenceOnly()) { + boolean exists = r != null && !r.isEmpty(); + if (region.getCoprocessorHost() != null) { + exists = region.getCoprocessorHost().postExists(clientGet, exists); + } + existence = Boolean.valueOf(exists); + } + } + } + if (existence != null) { + builder.setExists(existence.booleanValue()); + } else if (r != null) { + builder.setResult(ProtobufUtil.toResult(r)); + } + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Mutate data in a table. + * + * @param controller the RPC controller + * @param request the mutate request + * @throws ServiceException + */ + @Override + public MutateResponse mutate(final RpcController controller, + final MutateRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + MutateResponse.Builder builder = MutateResponse.newBuilder(); + Mutate mutate = request.getMutate(); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + Integer lock = null; + Result r = null; + Boolean processed = null; + MutateType type = mutate.getMutateType(); + switch (type) { + case APPEND: + r = append(region, mutate); + break; + case INCREMENT: + r = increment(region, mutate); + break; + case PUT: + Put put = ProtobufUtil.toPut(mutate); + lock = getLockFromId(put.getLockId()); + if (request.hasCondition()) { + Condition condition = request.getCondition(); + byte[] row = condition.getRow().toByteArray(); + byte[] family = condition.getFamily().toByteArray(); + byte[] qualifier = condition.getQualifier().toByteArray(); + CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); + WritableByteArrayComparable comparator = + (WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator()); + if (region.getCoprocessorHost() != null) { + processed = region.getCoprocessorHost().preCheckAndPut( + row, family, qualifier, compareOp, comparator, put); + } + if (processed == null) { + boolean result = region.checkAndMutate(row, family, + qualifier, compareOp, comparator, put, lock, true); + if (region.getCoprocessorHost() != null) { + result = region.getCoprocessorHost().postCheckAndPut(row, family, + qualifier, compareOp, comparator, put, result); + } + processed = Boolean.valueOf(result); + } + } else { + 
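+ // No Condition in the request: apply the Put directly under the + // client-supplied row lock, if any.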
region.put(put, lock); + processed = Boolean.TRUE; + } + break; + case DELETE: + Delete delete = ProtobufUtil.toDelete(mutate); + lock = getLockFromId(delete.getLockId()); + if (request.hasCondition()) { + Condition condition = request.getCondition(); + byte[] row = condition.getRow().toByteArray(); + byte[] family = condition.getFamily().toByteArray(); + byte[] qualifier = condition.getQualifier().toByteArray(); + CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); + WritableByteArrayComparable comparator = + (WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator()); + if (region.getCoprocessorHost() != null) { + processed = region.getCoprocessorHost().preCheckAndDelete( + row, family, qualifier, compareOp, comparator, delete); + } + if (processed == null) { + boolean result = region.checkAndMutate(row, family, + qualifier, compareOp, comparator, delete, lock, true); + if (region.getCoprocessorHost() != null) { + result = region.getCoprocessorHost().postCheckAndDelete(row, family, + qualifier, compareOp, comparator, delete, result); + } + processed = Boolean.valueOf(result); + } + } else { + region.delete(delete, lock, delete.getWriteToWAL()); + processed = Boolean.TRUE; + } + break; + default: + throw new DoNotRetryIOException( + "Unsupported mutate type: " + type.name()); + } + if (processed != null) { + builder.setProcessed(processed.booleanValue()); + } else if (r != null) { + builder.setResult(ProtobufUtil.toResult(r)); + } + return builder.build(); + } catch (IOException ie) { + checkFileSystem(); + throw new ServiceException(ie); + } + } + + // + // remote scanner interface + // + + /** + * Scan data in a table. + * + * @param controller the RPC controller + * @param request the scan request + * @throws ServiceException + */ + @Override + public ScanResponse scan(final RpcController controller, + final ScanRequest request) throws ServiceException { + Leases.Lease lease = null; + String scannerName = null; + try { + if (!request.hasScannerId() && !request.hasScan()) { + throw new DoNotRetryIOException( + "Missing required input: scannerId or scan"); + } + long scannerId = -1; + if (request.hasScannerId()) { + scannerId = request.getScannerId(); + scannerName = String.valueOf(scannerId); + } + try { + checkOpen(); + } catch (IOException e) { + // If checkOpen failed, server not running or filesystem gone, + // cancel this lease; filesystem is gone or we're closing or something. 
+ if (scannerName != null) { + try { + leases.cancelLease(scannerName); + } catch (LeaseException le) { + LOG.info("Server shutting down and client tried to access missing scanner " + + scannerName); + } + } + throw e; + } + requestCount.incrementAndGet(); + + try { + int ttl = 0; + HRegion region = null; + RegionScanner scanner = null; + boolean moreResults = true; + boolean closeScanner = false; + ScanResponse.Builder builder = ScanResponse.newBuilder(); + if (request.hasCloseScanner()) { + closeScanner = request.getCloseScanner(); + } + int rows = 1; + if (request.hasNumberOfRows()) { + rows = request.getNumberOfRows(); + } + if (request.hasScannerId()) { + scanner = scanners.get(scannerName); + if (scanner == null) { + throw new UnknownScannerException( + "Name: " + scannerName + ", already closed?"); + } + region = getRegion(scanner.getRegionInfo().getRegionName()); + } else { + region = getRegion(request.getRegion()); + ClientProtos.Scan protoScan = request.getScan(); + Scan scan = ProtobufUtil.toScan(protoScan); + region.prepareScanner(scan); + if (region.getCoprocessorHost() != null) { + scanner = region.getCoprocessorHost().preScannerOpen(scan); + } + if (scanner == null) { + scanner = region.getScanner(scan); + } + if (region.getCoprocessorHost() != null) { + scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner); + } + scannerId = addScanner(scanner); + scannerName = String.valueOf(scannerId); + ttl = leases.leasePeriod; + } + + if (rows > 0) { + try { + // Remove lease while it's being processed in server; protects against case + // where processing of request takes > lease expiration time. + lease = leases.removeLease(scannerName); + List<Result> results = new ArrayList<Result>(rows); + long currentScanResultSize = 0; + + boolean done = false; + // Call coprocessor. Get region info from scanner. + if (region != null && region.getCoprocessorHost() != null) { + Boolean bypass = region.getCoprocessorHost().preScannerNext( + scanner, results, rows); + if (!results.isEmpty()) { + for (Result r : results) { + for (KeyValue kv : r.raw()) { + currentScanResultSize += kv.heapSize(); + } + } + } + if (bypass != null && bypass.booleanValue()) { + done = true; + } + } + + if (!done) { + List<KeyValue> values = new ArrayList<KeyValue>(); + for (int i = 0; i < rows + && currentScanResultSize < maxScannerResultSize; i++) { + // Collect values to be returned here + boolean moreRows = scanner.next(values, HRegion.METRIC_NEXTSIZE); + if (!values.isEmpty()) { + for (KeyValue kv : values) { + currentScanResultSize += kv.heapSize(); + } + results.add(new Result(values)); + } + if (!moreRows) { + break; + } + values.clear(); + } + + // coprocessor postNext hook + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerNext(scanner, results, rows, true); + } + } + + // If the scanner's filter (if any) is done with the scan, tell the client + // to stop: this is done by passing a null result and setting moreResults + // to false. + if (scanner.isFilterDone() && results.isEmpty()) { + moreResults = false; + results = null; + } else { + for (Result result: results) { + if (result != null) { + builder.addResult(ProtobufUtil.toResult(result)); + } + } + } + } finally { + // We're done. On way out re-add the above removed lease. + // Adding resets expiration time on lease. 
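+ // Only re-add the lease if the scanner is still registered; a concurrent + // close, or the NotServingRegionException handling below, may already + // have removed it.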
+ if (scanners.containsKey(scannerName)) { + if (lease != null) leases.addLease(lease); + ttl = leases.leasePeriod; + } + } + } + + if (!moreResults || closeScanner) { + ttl = 0; + moreResults = false; + if (region != null && region.getCoprocessorHost() != null) { + if (region.getCoprocessorHost().preScannerClose(scanner)) { + return builder.build(); // bypass + } + } + scanner = scanners.remove(scannerName); + if (scanner != null) { + scanner.close(); + leases.cancelLease(scannerName); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerClose(scanner); + } + } + } + + if (ttl > 0) { + builder.setTtl(ttl); + } + builder.setScannerId(scannerId); + builder.setMoreResults(moreResults); + return builder.build(); + } catch (Throwable t) { + if (scannerName != null && + t instanceof NotServingRegionException) { + scanners.remove(scannerName); + } + throw convertThrowableToIOE(cleanup(t)); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Lock a row in a table. + * + * @param controller the RPC controller + * @param request the lock row request + * @throws ServiceException + */ + @Override + public LockRowResponse lockRow(final RpcController controller, + final LockRowRequest request) throws ServiceException { + try { + if (request.getRowCount() != 1) { + throw new DoNotRetryIOException( + "lockRow supports only one row now, not " + request.getRowCount() + " rows"); + } + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + byte[] row = request.getRow(0).toByteArray(); + try { + Integer r = region.obtainRowLock(row); + long lockId = addRowLock(r, region); + LOG.debug("Row lock " + lockId + " explicitly acquired by client"); + LockRowResponse.Builder builder = LockRowResponse.newBuilder(); + builder.setLockId(lockId); + return builder.build(); + } catch (Throwable t) { + throw convertThrowableToIOE(cleanup(t, + "Error obtaining row lock (fsOk: " + this.fsOk + ")")); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Unlock a locked row in a table. 
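+ * The lock id must come from an earlier lockRow call on this server; an + * unknown id is rejected with an UnknownRowLockException.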
+ * + * @param controller the RPC controller + * @param request the unlock row request + * @throws ServiceException + */ + @Override + @QosPriority(priority=HIGH_QOS) + public UnlockRowResponse unlockRow(final RpcController controller, + final UnlockRowRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + if (!request.hasLockId()) { + throw new DoNotRetryIOException( + "Invalid unlock row request, missing lock id"); + } + long lockId = request.getLockId(); + String lockName = String.valueOf(lockId); + try { + Integer r = rowlocks.remove(lockName); + if (r == null) { + throw new UnknownRowLockException(lockName); + } + region.releaseRowLock(r); + this.leases.cancelLease(lockName); + LOG.debug("Row lock " + lockId + + " has been explicitly released by client"); + return UnlockRowResponse.newBuilder().build(); + } catch (Throwable t) { + throw convertThrowableToIOE(cleanup(t)); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Atomically bulk load several HFiles into an open region. + * @return true if successful, false if failed recoverably (no action taken) + * @throws IOException if failed unrecoverably + */ + @Override + public BulkLoadHFileResponse bulkLoadHFile(final RpcController controller, + final BulkLoadHFileRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>(); + for (FamilyPath familyPath: request.getFamilyPathList()) { + familyPaths.add(new Pair<byte[], String>( + familyPath.getFamily().toByteArray(), familyPath.getPath())); + } + boolean loaded = region.bulkLoadHFiles(familyPaths); + BulkLoadHFileResponse.Builder builder = BulkLoadHFileResponse.newBuilder(); + builder.setLoaded(loaded); + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol} + * method using the registered protocol handlers. + * {@link CoprocessorProtocol} implementations must be registered per-region + * via the + * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} + * method before they are available. 
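+ * <p>On the wire the request's Exec is rebuilt with ProtobufUtil.toExec, + * dispatched through HRegion.exec, and the return value re-wrapped with + * ProtobufUtil.toParameter, as the body below shows. A hedged caller + * sketch (buildExecCoprocessorRequest is an assumed RequestConverter + * helper, symmetrical to buildGetRequest and not shown in this hunk): + * <pre> + *   // buildExecCoprocessorRequest: assumed helper, not shown in this hunk + *   ExecCoprocessorRequest request = + *     RequestConverter.buildExecCoprocessorRequest(regionName, exec); + *   ExecCoprocessorResponse response = server.execCoprocessor(null, request); + *   Object value = ProtobufUtil.toObject(response.getValue()); + * </pre>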
+ * + * @param regionName name of the region against which the invocation is executed + * @param call an {@code Exec} instance identifying the protocol, method name, + * and parameters for the method invocation + * @return an {@code ExecResult} instance containing the region name of the + * invocation and the return value + * @throws IOException if no registered protocol handler is found or an error + * occurs during the invocation + * @see org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol) + */ + @Override + public ExecCoprocessorResponse execCoprocessor(final RpcController controller, + final ExecCoprocessorRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + ExecCoprocessorResponse.Builder + builder = ExecCoprocessorResponse.newBuilder(); + ClientProtos.Exec call = request.getCall(); + Exec clientCall = ProtobufUtil.toExec(call); + ExecResult result = region.exec(clientCall); + builder.setValue(ProtobufUtil.toParameter(result.getValue())); + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Execute multiple actions on a table: get, mutate, and/or execCoprocessor. + * + * @param controller the RPC controller + * @param request the multi request + * @throws ServiceException + */ + @Override + public MultiResponse multi(final RpcController controller, + final MultiRequest request) throws ServiceException { + try { + HRegion region = getRegion(request.getRegion()); + MultiResponse.Builder builder = MultiResponse.newBuilder(); + if (request.hasAtomic() && request.getAtomic()) { + List<Mutate> mutates = new ArrayList<Mutate>(); + for (NameBytesPair parameter: request.getActionList()) { + Object action = ProtobufUtil.toObject(parameter); + if (action instanceof Mutate) { + mutates.add((Mutate)action); + } else { + throw new DoNotRetryIOException( + "Unsupported atomic action type: " + + action.getClass().getName()); + } + } + mutateRows(region, mutates); + } else { + ActionResult.Builder resultBuilder = null; + List<Mutate> puts = new ArrayList<Mutate>(); + for (NameBytesPair parameter: request.getActionList()) { + requestCount.incrementAndGet(); + try { + Object result = null; + Object action = ProtobufUtil.toObject(parameter); + if (action instanceof ClientProtos.Get) { + Get get = ProtobufUtil.toGet((ClientProtos.Get)action); + Integer lock = getLockFromId(get.getLockId()); + Result r = region.get(get, lock); + if (r != null) { + result = ProtobufUtil.toResult(r); + } + } else if (action instanceof Mutate) { + Mutate mutate = (Mutate)action; + MutateType type = mutate.getMutateType(); + if (type != MutateType.PUT) { + if (!puts.isEmpty()) { + put(builder, region, puts); + puts.clear(); + } else if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + } + Result r = null; + switch (type) { + case APPEND: + r = append(region, mutate); + break; + case INCREMENT: + r = increment(region, mutate); + break; + case PUT: + puts.add(mutate); + break; + case DELETE: + Delete delete = ProtobufUtil.toDelete(mutate); + Integer lock = getLockFromId(delete.getLockId()); + region.delete(delete, lock, delete.getWriteToWAL()); + r = new Result(); + break; + default: + throw new DoNotRetryIOException( + "Unsupported mutate type: " + type.name()); + } + if (r != null) { + result = ProtobufUtil.toResult(r); + } + } else if (action instanceof ClientProtos.Exec) { + Exec call = 
ProtobufUtil.toExec((ClientProtos.Exec)action); + result = region.exec(call).getValue(); + } else { + LOG.debug("Error: invalid action, " + + "it must be a Get, Mutate, or Exec."); + throw new DoNotRetryIOException("Invalid action, " + + "it must be a Get, Mutate, or Exec."); + } + if (result != null) { + if (resultBuilder == null) { + resultBuilder = ActionResult.newBuilder(); + } else { + resultBuilder.clear(); + } + NameBytesPair value = ProtobufUtil.toParameter(result); + resultBuilder.setValue(value); + builder.addResult(resultBuilder.build()); + } + } catch (IOException ie) { + builder.addResult(ResponseConverter.buildActionResult(ie)); + } + } + if (!puts.isEmpty()) { + put(builder, region, puts); + } + } + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + // End Client methods + + /** + * Find the HRegion based on a region specifier. + * + * @param regionSpecifier the region specifier + * @return the corresponding region + * @throws IOException if the specifier is valid but the + * region cannot be found + */ + protected HRegion getRegion( + final RegionSpecifier regionSpecifier) throws IOException { + byte[] value = regionSpecifier.getValue().toByteArray(); + RegionSpecifierType type = regionSpecifier.getType(); + checkOpen(); + switch (type) { + case REGION_NAME: + return getRegion(value); + case ENCODED_REGION_NAME: + String encodedRegionName = Bytes.toString(value); + HRegion region = this.onlineRegions.get(encodedRegionName); + if (region == null) { + throw new NotServingRegionException( + "Region is not online: " + encodedRegionName); + } + return region; + default: + throw new DoNotRetryIOException( + "Unsupported region specifier type: " + type); + } + } + + /** + * Execute an append mutation. + * + * @param region the region to append to + * @param mutate the append mutate + * @return the Result of the append + * @throws IOException + */ + protected Result append(final HRegion region, + final Mutate mutate) throws IOException { + Append append = ProtobufUtil.toAppend(mutate); + Result r = null; + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().preAppend(append); + } + if (r == null) { + Integer lock = getLockFromId(append.getLockId()); + r = region.append(append, lock, append.getWriteToWAL()); + if (region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postAppend(append, r); + } + } + return r; + } + + /** + * Execute an increment mutation. + * + * @param region the region to increment + * @param mutate the increment mutate + * @return the Result of the increment + * @throws IOException + */ + protected Result increment(final HRegion region, + final Mutate mutate) throws IOException { + Increment increment = ProtobufUtil.toIncrement(mutate); + Result r = null; + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().preIncrement(increment); + } + if (r == null) { + Integer lock = getLockFromId(increment.getLockId()); + r = region.increment(increment, lock, increment.getWriteToWAL()); + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().postIncrement(increment, r); + } + } + return r; + } + + /** + * Execute a list of put mutations. 
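+ * One empty ActionResult is pre-added per Put; any OperationStatus other + * than SUCCESS later overwrites the matching slot with a wrapped + * DoNotRetryIOException, keeping results aligned with request order.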
+ * + * @param builder the response builder to collect per-put results + * @param region the region to write to + * @param puts the list of put mutates + */ + protected void put(final MultiResponse.Builder builder, + final HRegion region, final List<Mutate> puts) { + @SuppressWarnings("unchecked") + Pair<Put, Integer>[] putsWithLocks = new Pair[puts.size()]; + + try { + ActionResult.Builder resultBuilder = ActionResult.newBuilder(); + NameBytesPair value = ProtobufUtil.toParameter(new Result()); + resultBuilder.setValue(value); + ActionResult result = resultBuilder.build(); + + int i = 0; + for (Mutate put : puts) { + Put p = ProtobufUtil.toPut(put); + Integer lock = getLockFromId(p.getLockId()); + putsWithLocks[i++] = new Pair<Put, Integer>(p, lock); + builder.addResult(result); + } + + requestCount.addAndGet(puts.size()); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + + OperationStatus[] codes = region.put(putsWithLocks); + for (i = 0; i < codes.length; i++) { + if (codes[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) { + result = ResponseConverter.buildActionResult( + new DoNotRetryIOException(codes[i].getExceptionMsg())); + builder.setResult(i, result); + } + } + } catch (IOException ie) { + ActionResult result = ResponseConverter.buildActionResult(ie); + for (int i = 0, n = puts.size(); i < n; i++) { + builder.setResult(i, result); + } + } + } + + /** + * Mutate a list of rows atomically. + * + * @param region the region to mutate + * @param mutates the list of put/delete mutates, all for the same row + * @throws IOException + */ + protected void mutateRows(final HRegion region, + final List<Mutate> mutates) throws IOException { + Mutate firstMutate = mutates.get(0); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + byte[] row = firstMutate.getRow().toByteArray(); + RowMutations rm = new RowMutations(row); + for (Mutate mutate: mutates) { + MutateType type = mutate.getMutateType(); + switch (type) { + case PUT: + rm.add(ProtobufUtil.toPut(mutate)); + break; + case DELETE: + rm.add(ProtobufUtil.toDelete(mutate)); + break; + default: + throw new DoNotRetryIOException( + "mutate supports atomic put and/or delete, not " + + type.name()); + } + } + region.mutateRow(rm); + } +} diff --git a/src/main/protobuf/RegionAdmin.proto b/src/main/protobuf/Admin.proto similarity index 96% rename from src/main/protobuf/RegionAdmin.proto rename to src/main/protobuf/Admin.proto index c64d68b9441..132c5dd34c0 100644 --- a/src/main/protobuf/RegionAdmin.proto +++ b/src/main/protobuf/Admin.proto @@ -16,10 +16,10 @@ * limitations under the License. */ -// This file contains protocol buffers that are used for RegionAdmin service. +// This file contains protocol buffers that are used for Admin service. 
option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RegionAdminProtos"; +option java_outer_classname = "AdminProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; option optimize_for = SPEED; @@ -146,7 +146,7 @@ message WALEntry { } message WALEdit { - repeated KeyValue keyValue = 1; + repeated bytes keyValue = 1; repeated FamilyScope familyScope = 2; enum ScopeType { @@ -197,7 +197,7 @@ message GetServerInfoResponse { required ServerName serverName = 1; } -service RegionAdminService { +service AdminService { rpc getRegionInfo(GetRegionInfoRequest) returns(GetRegionInfoResponse); diff --git a/src/main/protobuf/RegionClient.proto b/src/main/protobuf/Client.proto similarity index 78% rename from src/main/protobuf/RegionClient.proto rename to src/main/protobuf/Client.proto index 358382bda74..a7a19e032b8 100644 --- a/src/main/protobuf/RegionClient.proto +++ b/src/main/protobuf/Client.proto @@ -16,10 +16,10 @@ * limitations under the License. */ -// This file contains protocol buffers that are used for RegionClient service. +// This file contains protocol buffers that are used for Client service. option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RegionClientProtos"; +option java_outer_classname = "ClientProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; option optimize_for = SPEED; @@ -34,27 +34,26 @@ message Column { repeated bytes qualifier = 2; } -message Attribute { - required string name = 1; - optional bytes value = 2; -} - /** * The protocol buffer version of Get */ message Get { required bytes row = 1; repeated Column column = 2; - repeated Attribute attribute = 3; + repeated NameBytesPair attribute = 3; optional uint64 lockId = 4; - optional Parameter filter = 5; + optional NameBytesPair filter = 5; optional TimeRange timeRange = 6; optional uint32 maxVersions = 7 [default = 1]; optional bool cacheBlocks = 8 [default = true]; } +/** + * For performance reasons, we don't use KeyValue + * here. We use the actual KeyValue bytes. + */ message Result { - repeated KeyValue value = 1; + repeated bytes keyValueBytes = 1; } /** @@ -89,8 +88,6 @@ message GetResponse { /** * Condition to check if the value of a given cell (row, * family, qualifier) matches a value via a given comparator. - * The value is optional since some comparator may not require - * a value to compare, for example, checking null. * * Condition is used in check and mutate operations. 
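+ * The comparator is carried as a NameBytesPair; as the server-side + * ProtobufUtil.toObject call suggests, the pair holds the comparator's + * class name and its serialized bytes.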
*/ @@ -99,8 +96,7 @@ message Condition { required bytes family = 2; required bytes qualifier = 3; required CompareType compareType = 4; - required Comparator comparator = 5; - optional bytes value = 6; + required NameBytesPair comparator = 5; enum CompareType { LESS = 0; @@ -111,17 +107,6 @@ message Condition { GREATER = 5; NO_OP = 6; } - - enum Comparator { - BINARY_COMPARATOR = 0; - BINARY_PREFIX_COMPARATOR = 1; - BIT_AND_COMPARATOR = 2; - BIT_OR_COMPARATOR = 3; - BIT_XOR_COMPARATOR = 4; - NULL_COMPARATOR = 5; - REGEX_STRING_COMPARATOR = 6; - SUBSTRING_COMPARATOR = 7; - } } /** @@ -133,7 +118,7 @@ message Mutate { required bytes row = 1; required MutateType mutateType = 2; repeated ColumnValue columnValue = 3; - repeated Attribute attribute = 4; + repeated NameBytesPair attribute = 4; optional uint64 timestamp = 5; optional uint64 lockId = 6; optional bool writeToWAL = 7 [default = true]; @@ -147,22 +132,23 @@ message Mutate { INCREMENT = 1; PUT = 2; DELETE = 3; - DELETE_COLUMN = 4; - DELETE_FAMILY = 5; + } + + enum DeleteType { + DELETE_ONE_VERSION = 0; + DELETE_MULTIPLE_VERSIONS = 1; + DELETE_FAMILY = 2; } message ColumnValue { required bytes family = 1; repeated QualifierValue qualifierValue = 2; - // Default timestamp for qalifier values, - // or timestamp of the column family to be deleted - optional uint64 timestamp = 3; - message QualifierValue { - required bytes qualifier = 1; + optional bytes qualifier = 1; optional bytes value = 2; optional uint64 timestamp = 3; + optional DeleteType deleteType = 4; } } } @@ -200,15 +186,14 @@ message MutateResponse { */ message Scan { repeated Column column = 1; - repeated Attribute attribute = 2; + repeated NameBytesPair attribute = 2; optional bytes startRow = 3; optional bytes stopRow = 4; - optional Parameter filter = 5; + optional NameBytesPair filter = 5; optional TimeRange timeRange = 6; optional uint32 maxVersions = 7 [default = 1]; optional bool cacheBlocks = 8 [default = true]; - optional uint32 rowsToCache = 9; - optional uint32 batchSize = 10; + optional uint32 batchSize = 9; } /** @@ -223,10 +208,11 @@ message Scan { * a trip if you are not interested in remaining results. */ message ScanRequest { - optional uint64 scannerId = 1; + optional RegionSpecifier region = 1; optional Scan scan = 2; - optional uint32 numberOfRows = 3; - optional bool closeScanner = 4; + optional uint64 scannerId = 3; + optional uint32 numberOfRows = 4; + optional bool closeScanner = 5; } /** @@ -276,16 +262,6 @@ message BulkLoadHFileResponse { required bool loaded = 1; } -message Parameter { - required string type = 1; - optional bytes binaryValue = 2; -} - -message Property { - required string name = 1; - required string value = 2; -} - /** * An individual coprocessor call. You must specify the protocol, * the method, and the row to which the call will be executed. @@ -302,8 +278,8 @@ message Exec { required bytes row = 1; required string protocolName = 2; required string methodName = 3; - repeated Property property = 4; - repeated Parameter parameter = 5; + repeated NameStringPair property = 4; + repeated NameBytesPair parameter = 5; } /** @@ -320,32 +296,40 @@ message ExecCoprocessorRequest { } message ExecCoprocessorResponse { - required bytes regionName = 1; - required Parameter value = 2; + required NameBytesPair value = 1; } /** - * You can execute a list of actions on regions assigned - * to the same region server, if you can't find an individual - * call which meets your requirement. + * An individual action result. 
The result will be in the + same order as the action in the request. If an action + returns a value, it is set in the value field. If it doesn't + return anything, the result will be empty. If an action + fails to execute due to any exception, the exception + is returned as a stringified parameter. + */ +message ActionResult { + optional NameBytesPair value = 1; + optional NameBytesPair exception = 2; +} + +/** + * You can execute a list of actions on a given region in order. + * - * The multi request can have a list of requests. Each request - * should be a protocol buffer encoded request such as GetRequest, - * MutateRequest, ExecCoprocessorRequest. - * - * If the list contains multiple mutate requests only, atomic can - * be set to make sure they can be processed atomically. + * If it is a list of mutate actions, atomic can be set + * to make sure they can be processed atomically, just like + * RowMutations. */ message MultiRequest { - repeated Parameter request = 1; - optional bool atomic = 2; + required RegionSpecifier region = 1; + repeated NameBytesPair action = 2; + optional bool atomic = 3; } message MultiResponse { - repeated Parameter response = 1; + repeated ActionResult result = 1; } -service RegionClientService { +service ClientService { rpc get(GetRequest) returns(GetResponse); diff --git a/src/main/protobuf/hbase.proto b/src/main/protobuf/hbase.proto index da7878848b0..12e6053fc68 100644 --- a/src/main/protobuf/hbase.proto +++ b/src/main/protobuf/hbase.proto @@ -101,3 +101,16 @@ message ServerName { optional uint32 port = 2; optional uint64 startCode = 3; } + +// Common data structures + +message NameStringPair { + required string name = 1; + required string value = 2; +} + +message NameBytesPair { + required string name = 1; + optional bytes value = 2; +} + diff --git a/src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java b/src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java index c7284dc60f2..d6ae0e23bb9 100644 --- a/src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java +++ b/src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java @@ -41,6 +41,9 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.RetriesExhaustedException; import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Writables; @@ -58,6 +61,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test {@link CatalogTracker} */ @@ -131,13 +137,16 @@ public class TestCatalogTracker { /** * Test interruptable while blocking wait on root and meta. 
* @throws IOException + * @throws ServiceException * @throws InterruptedException */ @Test public void testInterruptWaitOnMetaAndRoot() - throws IOException, InterruptedException { - HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + throws IOException, InterruptedException, ServiceException { + final ClientProtocol client = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(null, client); try { + Mockito.when(client.get((RpcController)Mockito.any(), (GetRequest)Mockito.any())). + thenReturn(GetResponse.newBuilder().build()); final CatalogTracker ct = constructAndStartCatalogTracker(connection); ServerName hsa = ct.getRootLocation(); Assert.assertNull(hsa); @@ -176,10 +185,11 @@ public class TestCatalogTracker { */ @Test public void testServerNotRunningIOException() - throws IOException, InterruptedException, KeeperException { + throws IOException, InterruptedException, KeeperException, ServiceException { // Mock an HRegionInterface. final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + final ClientProtocol client = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(implementation, client); try { // If a 'getRegionInfo' is called on mocked HRegionInterface, throw IOE // the first time. 'Succeed' the second time we are called. @@ -198,6 +208,8 @@ public class TestCatalogTracker { Mockito.when(connection.getRegionServerWithRetries((ServerCallable)Mockito.any())). thenReturn(getMetaTableRowResult()); + Mockito.when(client.get((RpcController)Mockito.any(), (GetRequest)Mockito.any())). + thenReturn(GetResponse.newBuilder().build()); // Now start up the catalogtracker with our doctored Connection. final CatalogTracker ct = constructAndStartCatalogTracker(connection); try { @@ -245,17 +257,18 @@ public class TestCatalogTracker { * @throws IOException * @throws InterruptedException * @throws KeeperException + * @throws ServiceException */ @Test public void testGetMetaServerConnectionFails() - throws IOException, InterruptedException, KeeperException { - // Mock an HRegionInterface. - final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + throws IOException, InterruptedException, KeeperException, ServiceException { + // Mock a ClientProtocol. + final ClientProtocol implementation = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(null, implementation); try { // If a 'get' is called on mocked interface, throw connection refused. - Mockito.when(implementation.get((byte[]) Mockito.any(), (Get) Mockito.any())). - thenThrow(new ConnectException("Connection refused")); + Mockito.when(implementation.get((RpcController) Mockito.any(), (GetRequest) Mockito.any())). + thenThrow(new ServiceException(new ConnectException("Connection refused"))); // Now start up the catalogtracker with our doctored Connection. final CatalogTracker ct = constructAndStartCatalogTracker(connection); try { @@ -371,7 +384,7 @@ public class TestCatalogTracker { // to make our test work. // Mock an HRegionInterface. final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + HConnection connection = mockConnection(implementation, null); try { // Now the ct is up... 
set into the mocks some answers that make it look // like things have been getting assigned. Make it so we'll return a @@ -419,6 +432,7 @@ public class TestCatalogTracker { /** * @param implementation An {@link HRegionInterface} instance; you'll likely * want to pass a mocked HRS; can be null. + * @param client A mocked ClientProtocol instance, can be null * @return Mock up a connection that returns a {@link Configuration} when * {@link HConnection#getConfiguration()} is called, a 'location' when * {@link HConnection#getRegionLocation(byte[], byte[], boolean)} is called, @@ -429,7 +443,8 @@ public class TestCatalogTracker { * when done with this mocked Connection. * @throws IOException */ - private HConnection mockConnection(final HRegionInterface implementation) + private HConnection mockConnection( + final HRegionInterface implementation, final ClientProtocol client) throws IOException { HConnection connection = HConnectionTestingUtility.getMockedConnection(UTIL.getConfiguration()); @@ -449,6 +464,11 @@ public class TestCatalogTracker { Mockito.when(connection.getHRegionConnection(Mockito.anyString(), Mockito.anyInt())). thenReturn(implementation); } + if (client != null) { + // If a call to getClient, return this client. + Mockito.when(connection.getClient(Mockito.anyString(), Mockito.anyInt())). + thenReturn(client); + } return connection; } diff --git a/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java b/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java index 00424683f93..3cfc02b775c 100644 --- a/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java +++ b/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java @@ -31,8 +31,10 @@ import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -42,6 +44,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test MetaReader/Editor but without spinning up a cluster. * We mock regionserver back and forth (we do spin up a zk cluster). @@ -82,7 +87,8 @@ public class TestMetaReaderEditorNoCluster { * @throws InterruptedException */ @Test - public void testRideOverServerNotRunning() + public void testRideOverServerNotRunning() + throws IOException, InterruptedException, ServiceException { // Need a zk watcher. ZooKeeperWatcher zkw = new ZooKeeperWatcher(UTIL.getConfiguration(), this.getClass().getSimpleName(), ABORTABLE, true); @@ -92,27 +98,16 @@ public class TestMetaReaderEditorNoCluster { HConnection connection = null; CatalogTracker ct = null; try { - // Mock an HRegionInterface. Our mock implementation will fail a few + // Mock a ClientProtocol. 
Our mock implementation will fail a few // times when we go to open a scanner. - final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - // When openScanner called throw IOE 'Server not running' a few times + final ClientProtocol implementation = Mockito.mock(ClientProtocol.class); + // When scan is called, throw IOE 'Server not running' a few times // before we return a scanner id. Whats WEIRD is that these // exceptions do not show in the log because they are caught and only // printed if we FAIL. We eventually succeed after retry so these don't // show. We will know if they happened or not because we will ask - // mockito at the end of this test to verify that openscanner was indeed + // mockito at the end of this test to verify that scan was indeed // called the wanted number of times. - final long scannerid = 123L; - Mockito.when(implementation.openScanner((byte [])Mockito.any(), - (Scan)Mockito.any())). - thenThrow(new IOException("Server not running (1 of 3)")). - thenThrow(new IOException("Server not running (2 of 3)")). - thenThrow(new IOException("Server not running (3 of 3)")). - thenReturn(scannerid); - // Make it so a verifiable answer comes back when next is called. Return - // the verifiable answer and then a null so we stop scanning. Our - // verifiable answer is something that looks like a row in META with - // a server and startcode that is that of the above defined servername. List<KeyValue> kvs = new ArrayList<KeyValue>(); final byte [] rowToVerify = Bytes.toBytes("rowToVerify"); kvs.add(new KeyValue(rowToVerify, HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes.toBytes(sn.getHostAndPort()))); kvs.add(new KeyValue(rowToVerify, HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn.getStartcode()))); - final Result [] result = new Result [] {new Result(kvs)}; - Mockito.when(implementation.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(result). - thenReturn(null); + final Result [] results = new Result [] {new Result(kvs)}; + ScanResponse.Builder builder = ScanResponse.newBuilder(); + for (Result result: results) { + builder.addResult(ProtobufUtil.toResult(result)); + } + Mockito.when(implementation.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenThrow(new ServiceException("Server not running (1 of 3)")). + thenThrow(new ServiceException("Server not running (2 of 3)")). + thenThrow(new ServiceException("Server not running (3 of 3)")). + thenReturn(ScanResponse.newBuilder().setScannerId(1234567890L).build()) .thenReturn(builder.build()).thenReturn( ScanResponse.newBuilder().setMoreResults(false).build()); // Associate a spied-upon HConnection with UTIL.getConfiguration. Need // to shove this in here first so it gets picked up all over; e.g. by @@ -150,7 +154,7 @@ public class TestMetaReaderEditorNoCluster { // Now shove our HRI implementation into the spied-upon connection. Mockito.doReturn(implementation). - when(connection).getHRegionConnection(Mockito.anyString(), Mockito.anyInt()); + when(connection).getClient(Mockito.anyString(), Mockito.anyInt()); // Now start up the catalogtracker with our doctored Connection. 
ct = new CatalogTracker(zkw, null, connection, ABORTABLE, 0); @@ -160,10 +164,10 @@ assertTrue(hris.size() == 1); assertTrue(hris.firstEntry().getKey().equals(HRegionInfo.FIRST_META_REGIONINFO)); assertTrue(Bytes.equals(rowToVerify, hris.firstEntry().getValue().getRow())); - // Finally verify that openscanner was called four times -- three times - // with exception and then on 4th attempt we succeed. - Mockito.verify(implementation, Mockito.times(4)). - openScanner((byte [])Mockito.any(), (Scan)Mockito.any()); + // Finally verify that scan was called six times -- three times + // with exceptions and then on the 4th, 5th and 6th attempts we succeed + Mockito.verify(implementation, Mockito.times(6)). + scan((RpcController)Mockito.any(), (ScanRequest)Mockito.any()); } finally { if (ct != null) ct.stop(); HConnectionManager.deleteConnection(UTIL.getConfiguration(), true); diff --git a/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java b/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java index e34d8bcdd4f..8af0f91d32d 100644 --- a/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java +++ b/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionImplementation; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionKey; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.mockito.Mockito; /** @@ -92,7 +93,8 @@ public class HConnectionTestingUtility { * @throws IOException */ public static HConnection getMockedConnectionAndDecorate(final Configuration conf, - final HRegionInterface implementation, final ServerName sn, final HRegionInfo hri) + final HRegionInterface implementation, final ClientProtocol client, + final ServerName sn, final HRegionInfo hri) throws IOException { HConnection c = HConnectionTestingUtility.getMockedConnection(conf); Mockito.doNothing().when(c).close(); @@ -108,6 +110,11 @@ public class HConnectionTestingUtility { Mockito.when(c.getHRegionConnection(Mockito.anyString(), Mockito.anyInt())). thenReturn(implementation); } + if (client != null) { + // If a call to getClient, return this client. + Mockito.when(c.getClient(Mockito.anyString(), Mockito.anyInt())). + thenReturn(client); + } return c; } diff --git a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java index f2f8ee38b6e..d8e3ee1709b 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java +++ b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java @@ -99,6 +99,7 @@ import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; import com.google.protobuf.Message; +import com.google.protobuf.RpcController; @Category(SmallTests.class) public class TestHbaseObjectWritable extends TestCase { @@ -523,6 +524,7 @@ public class TestHbaseObjectWritable extends TestCase { assertEquals(80,HbaseObjectWritable.getClassCode(Message.class).intValue()); assertEquals(81,HbaseObjectWritable.getClassCode(Array.class).intValue()); + assertEquals(82,HbaseObjectWritable.getClassCode(RpcController.class).intValue()); } /** @@ -531,7 +533,7 @@ public class TestHbaseObjectWritable extends TestCase { * note on the test above. 
*/ public void testGetNextObjectCode(){ - assertEquals(82,HbaseObjectWritable.getNextClassCode()); + assertEquals(83,HbaseObjectWritable.getNextClassCode()); } @org.junit.Rule diff --git a/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 41616c8bf49..a59e15212c6 100644 --- a/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -50,6 +50,23 @@ import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary; import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.ipc.ProtocolSignature; import org.apache.hadoop.hbase.ipc.RpcServer; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse; import org.apache.hadoop.hbase.regionserver.CompactionRequestor; import org.apache.hadoop.hbase.regionserver.FlushRequester; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -64,6 +81,9 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * A mock RegionServer implementation. * Use this when you can't bend Mockito to your liking (e.g. return null result @@ -72,7 +92,7 @@ import org.apache.zookeeper.KeeperException; * {@link #setGetResult(byte[], byte[], Result)} for how to fill the backing data * store that the get pulls from. 
*/ -class MockRegionServer implements HRegionInterface, RegionServerServices { +class MockRegionServer implements HRegionInterface, ClientProtocol, RegionServerServices { private final ServerName sn; private final ZooKeeperWatcher zkw; private final Configuration conf; @@ -245,9 +265,8 @@ class MockRegionServer implements HRegionInterface, RegionServerServices { @Override public Result get(byte[] regionName, Get get) throws IOException { - Map<byte [], Result> m = this.gets.get(regionName); - if (m == null) return null; - return m.get(get.getRow()); + // TODO Auto-generated method stub + return null; } @Override @@ -597,4 +616,87 @@ class MockRegionServer implements HRegionInterface, RegionServerServices { public void mutateRow(byte[] regionName, RowMutations rm) throws IOException { // TODO Auto-generated method stub } + + @Override + public GetResponse get(RpcController controller, GetRequest request) + throws ServiceException { + byte[] regionName = request.getRegion().getValue().toByteArray(); + Map<byte [], Result> m = this.gets.get(regionName); + GetResponse.Builder builder = GetResponse.newBuilder(); + if (m != null) { + byte[] row = request.getGet().getRow().toByteArray(); + builder.setResult(ProtobufUtil.toResult(m.get(row))); + } + return builder.build(); + } + + @Override + public MutateResponse mutate(RpcController controller, MutateRequest request) + throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public ScanResponse scan(RpcController controller, ScanRequest request) + throws ServiceException { + ScanResponse.Builder builder = ScanResponse.newBuilder(); + try { + if (request.hasScan()) { + byte[] regionName = request.getRegion().getValue().toByteArray(); + builder.setScannerId(openScanner(regionName, null)); + builder.setMoreResults(true); + } + else { + long scannerId = request.getScannerId(); + Result result = next(scannerId); + if (result != null) { + builder.addResult(ProtobufUtil.toResult(result)); + builder.setMoreResults(true); + } + else { + builder.setMoreResults(false); + close(scannerId); + } + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + return builder.build(); + } + + @Override + public LockRowResponse lockRow(RpcController controller, + LockRowRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public UnlockRowResponse unlockRow(RpcController controller, + UnlockRowRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public BulkLoadHFileResponse bulkLoadHFile(RpcController controller, + BulkLoadHFileRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public ExecCoprocessorResponse execCoprocessor(RpcController controller, + ExecCoprocessorRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + RpcController controller, MultiRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } } \ No newline at end of file diff --git a/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java b/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java index 6ed4ba2c4fc..b84a1157ec0 100644 --- a/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java +++ 
b/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java @@ -31,23 +31,25 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HServerLoad; -import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.catalog.CatalogTracker; -import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.executor.EventHandler.EventType; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType; import org.apache.hadoop.hbase.executor.RegionTransitionData; -import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.master.handler.ServerShutdownHandler; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.RegionOpeningState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; @@ -65,6 +67,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test {@link AssignmentManager} @@ -151,7 +156,7 @@ public class TestAssignmentManager { */ @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithOpenedNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -194,7 +199,7 @@ public class TestAssignmentManager { @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithClosedNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -238,7 +243,7 @@ public class TestAssignmentManager { @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithOfflineNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -363,7 +368,8 @@ public class TestAssignmentManager { * @throws IOException */ @Test - public void testShutdownHandler() throws KeeperException, IOException { + public void testShutdownHandler() + throws KeeperException, IOException, ServiceException { // Create and startup an executor. This is used by AssignmentManager // handling zk callbacks. 
ExecutorService executor = startupMasterExecutor("testShutdownHandler"); @@ -380,19 +386,20 @@ public class TestAssignmentManager { // Need to set up a fake scan of meta for the servershutdown handler // Make an RS Interface implementation. Make it so a scanner can go against it. - HRegionInterface implementation = Mockito.mock(HRegionInterface.class); + ClientProtocol implementation = Mockito.mock(ClientProtocol.class); // Get a meta row result that has region up on SERVERNAME_A Result r = Mocking.getMetaTableRowResult(REGIONINFO, SERVERNAME_A); - Mockito.when(implementation.openScanner((byte [])Mockito.any(), (Scan)Mockito.any())). - thenReturn(System.currentTimeMillis()); - // Return a good result first and then return null to indicate end of scan - Mockito.when(implementation.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(new Result [] {r}, (Result [])null); + ScanResponse.Builder builder = ScanResponse.newBuilder(); + builder.setMoreResults(false); + builder.addResult(ProtobufUtil.toResult(r)); + Mockito.when(implementation.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenReturn(builder.build()); // Get a connection w/ mocked up common methods. HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(HTU.getConfiguration(), - implementation, SERVERNAME_B, REGIONINFO); + null, implementation, SERVERNAME_B, REGIONINFO); // Make it so we can get a catalogtracker from servermanager.. .needed // down in guts of server shutdown handler. @@ -531,7 +538,7 @@ public class TestAssignmentManager { */ private AssignmentManagerWithExtrasForTesting setUpMockedAssignmentManager(final Server server, final ServerManager manager) - throws IOException, KeeperException { + throws IOException, KeeperException, ServiceException { // We need a mocked catalog tracker. Its used by our AM instance. CatalogTracker ct = Mockito.mock(CatalogTracker.class); // Make an RS Interface implementation. Make it so a scanner can go against @@ -539,21 +546,24 @@ public class TestAssignmentManager { // messing with. Needed when "new master" joins cluster. AM will try and // rebuild its list of user regions and it will also get the HRI that goes // with an encoded name by doing a Get on .META. - HRegionInterface ri = Mockito.mock(HRegionInterface.class); + ClientProtocol ri = Mockito.mock(ClientProtocol.class); // Get a meta row result that has region up on SERVERNAME_A for REGIONINFO Result r = Mocking.getMetaTableRowResult(REGIONINFO, SERVERNAME_A); - Mockito.when(ri .openScanner((byte[]) Mockito.any(), (Scan) Mockito.any())). - thenReturn(System.currentTimeMillis()); - // Return good result 'r' first and then return null to indicate end of scan - Mockito.when(ri.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(new Result[] { r }, (Result[]) null); + ScanResponse.Builder builder = ScanResponse.newBuilder(); + builder.setMoreResults(false); + builder.addResult(ProtobufUtil.toResult(r)); + Mockito.when(ri.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenReturn(builder.build()); // If a get, return the above result too for REGIONINFO - Mockito.when(ri.get((byte[]) Mockito.any(), (Get) Mockito.any())). - thenReturn(r); + GetResponse.Builder getBuilder = GetResponse.newBuilder(); + getBuilder.setResult(ProtobufUtil.toResult(r)); + Mockito.when(ri.get((RpcController)Mockito.any(), (GetRequest) Mockito.any())). + thenReturn(getBuilder.build()); // Get a connection w/ mocked up common methods. 
HConnection connection = HConnectionTestingUtility. - getMockedConnectionAndDecorate(HTU.getConfiguration(), ri, SERVERNAME_B, - REGIONINFO); + getMockedConnectionAndDecorate(HTU.getConfiguration(), null, + ri, SERVERNAME_B, REGIONINFO); // Make it so we can get the connection from our mocked catalogtracker Mockito.when(ct.getConnection()).thenReturn(connection); // Create and startup an executor. Used by AM handling zk callbacks. diff --git a/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index b4dcb83b900..cedf31e3544 100644 --- a/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ b/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -54,6 +54,10 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; @@ -62,6 +66,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + @Category(SmallTests.class) public class TestCatalogJanitor { /** @@ -76,12 +83,22 @@ public class TestCatalogJanitor { MockServer(final HBaseTestingUtility htu) throws NotAllMetaRegionsOnlineException, IOException, InterruptedException { this.c = htu.getConfiguration(); + ClientProtocol ri = Mockito.mock(ClientProtocol.class); + MutateResponse.Builder builder = MutateResponse.newBuilder(); + builder.setProcessed(true); + try { + Mockito.when(ri.mutate( + (RpcController)Mockito.any(), (MutateRequest)Mockito.any())). + thenReturn(builder.build()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } // Mock an HConnection and a HRegionInterface implementation. Have the // HConnection return the HRI. Have the HRI return a few mocked up responses // to make our test work. this.connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(this.c, - Mockito.mock(HRegionInterface.class), + Mockito.mock(HRegionInterface.class), ri, new ServerName("example.org,12345,6789"), HRegionInfo.FIRST_META_REGIONINFO); // Set hbase.rootdir into test dir. diff --git a/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java index ceca6f57396..41b339ca368 100644 --- a/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java +++ b/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java @@ -194,7 +194,7 @@ public class TestMasterNoCluster { // associate so the below mocking of a connection will fail. 
HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(), - rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); + rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout); } }; @@ -271,7 +271,7 @@ public class TestMasterNoCluster { // of a connection will fail. HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(), - rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); + rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout); } }; diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java b/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java index cac29897efc..fa177aececa 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java @@ -25,6 +25,11 @@ import java.util.List; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; + +import com.google.protobuf.ServiceException; /** * A region server that will OOME. @@ -42,10 +47,16 @@ public class OOMERegionServer extends HRegionServer { public void put(byte [] regionName, Put put) throws IOException { - super.put(regionName, put); - for (int i = 0; i < 30; i++) { - // Add the batch update 30 times to bring on the OOME faster. - this.retainer.add(put); + try { + MutateRequest request = + RequestConverter.buildMutateRequest(regionName, put); + super.mutate(null, request); + for (int i = 0; i < 30; i++) { + // Add the batch update 30 times to bring on the OOME faster. 
+ this.retainer.add(put); + } + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); } } diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index a1bf73b6b6e..d0cad45a429 100644 --- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -39,6 +39,9 @@ import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.junit.Test; @@ -145,7 +148,9 @@ public class TestHRegionServerBulkLoad { LOG.debug("Going to connect to server " + location + " for row " + Bytes.toStringBinary(row)); byte[] regionName = location.getRegionInfo().getRegionName(); - server.bulkLoadHFiles(famPaths, regionName); + BulkLoadHFileRequest request = + RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName); + server.bulkLoadHFile(null, request); return null; } }.withRetries(); @@ -159,6 +164,8 @@ public class TestHRegionServerBulkLoad { public Void call() throws Exception { LOG.debug("compacting " + location + " for row " + Bytes.toStringBinary(row)); + HRegionInterface server = connection.getHRegionConnection( + location.getHostname(), location.getPort()); server.compactRegion(location.getRegionInfo(), true); numCompactions.incrementAndGet(); return null;
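
The hunks above all repeat one conversion pattern: build a protobuf request with RequestConverter, invoke the generated ClientProtocol stub, and turn the stub's ServiceException back into the IOException callers expect via ProtobufUtil.getRemoteException. A minimal sketch of that round trip for a Get, assuming a buildGetRequest converter and a proto-to-client toResult overload shaped like the helpers used in the hunks above (both names are inferred from those usages, not quoted from this patch):

import java.io.IOException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.protobuf.ClientProtocol;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;

import com.google.protobuf.ServiceException;

public class ProtobufGetSketch {
  /**
   * Fetch a row through the PB-based client protocol. The converter
   * calls below are assumptions patterned on the hunks above.
   */
  public static Result get(ClientProtocol client, byte[] regionName, Get get)
  throws IOException {
    try {
      // Wrap the client-side Get in a PB GetRequest aimed at the region.
      GetRequest request = RequestConverter.buildGetRequest(regionName, get);
      // Generated stubs take an RpcController (null here) and throw
      // ServiceException rather than IOException.
      GetResponse response = client.get(null, request);
      // Convert the PB result back into a client-side Result.
      return response.hasResult()
          ? ProtobufUtil.toResult(response.getResult()) : null;
    } catch (ServiceException se) {
      // Unwrap the remote IOException, as OOMERegionServer does above.
      throw ProtobufUtil.getRemoteException(se);
    }
  }
}

The same wrap, call, unwrap shape applies to the mutate, scan, and bulkLoadHFile conversions shown in the OOMERegionServer and TestHRegionServerBulkLoad hunks.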