From f9dc4cad63b1ffcd1f9050b9b8e8d89f44ecd44a Mon Sep 17 00:00:00 2001 From: huzheng Date: Mon, 15 May 2017 16:00:01 +0800 Subject: [PATCH] HBASE-11013 Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions Signed-off-by: Guanghao Zhang --- .../org/apache/hadoop/hbase/client/Admin.java | 49 +- .../hadoop/hbase/client/HBaseAdmin.java | 63 +- .../security/access/TablePermission.java | 4 + .../ClientSnapshotDescriptionUtils.java | 8 +- .../snapshot/CorruptedSnapshotException.java | 2 +- .../snapshot/HBaseSnapshotException.java | 2 +- .../snapshot/RestoreSnapshotException.java | 2 +- .../snapshot/SnapshotCreationException.java | 2 +- .../SnapshotDoesNotExistException.java | 2 +- .../snapshot/SnapshotExistsException.java | 2 +- .../hbase/client/TestSnapshotFromAdmin.java | 2 +- .../hbase/protobuf/generated/HBaseProtos.java | 1358 +-------------- .../protobuf/generated/MasterProtos.java | 1088 ++++++------ .../protobuf/generated/SnapshotProtos.java | 1551 ++++++++++++++++- hbase-protocol/src/main/protobuf/HBase.proto | 17 - hbase-protocol/src/main/protobuf/Master.proto | 2 + .../src/main/protobuf/Snapshot.proto | 19 + .../hbase/tmpl/master/MasterStatusTmpl.jamon | 2 +- .../BaseMasterAndRegionObserver.java | 2 +- .../hbase/coprocessor/BaseMasterObserver.java | 2 +- .../hbase/coprocessor/MasterObserver.java | 2 +- .../TableSnapshotInputFormatImpl.java | 2 +- .../hbase/master/MasterCoprocessorHost.java | 2 +- .../hbase/master/MasterRpcServices.java | 5 +- .../hadoop/hbase/master/SnapshotSentinel.java | 2 +- .../master/snapshot/CloneSnapshotHandler.java | 14 +- .../DisabledTableSnapshotHandler.java | 4 +- .../snapshot/EnabledTableSnapshotHandler.java | 4 +- .../snapshot/MasterSnapshotVerifier.java | 2 +- .../snapshot/RestoreSnapshotHandler.java | 15 +- .../master/snapshot/SnapshotManager.java | 21 +- .../master/snapshot/TakeSnapshotHandler.java | 2 +- .../hadoop/hbase/regionserver/HRegion.java | 2 +- 
.../hbase/regionserver/HRegionServer.java | 2 - .../snapshot/FlushSnapshotSubprocedure.java | 3 +- .../snapshot/RegionServerSnapshotManager.java | 2 +- .../security/access/AccessControlLists.java | 2 +- .../security/access/AccessController.java | 2 +- .../hadoop/hbase/snapshot/CreateSnapshot.java | 8 +- .../hadoop/hbase/snapshot/ExportSnapshot.java | 2 +- .../hbase/snapshot/RestoreSnapshotHelper.java | 30 +- .../snapshot/SnapshotDescriptionUtils.java | 51 +- .../hadoop/hbase/snapshot/SnapshotInfo.java | 2 +- .../hbase/snapshot/SnapshotManifest.java | 2 +- .../hbase/snapshot/SnapshotManifestV1.java | 2 +- .../hbase/snapshot/SnapshotManifestV2.java | 4 +- .../hbase/snapshot/SnapshotReferenceUtil.java | 2 +- .../hbase-webapps/master/snapshot.jsp | 2 +- .../hbase-webapps/master/snapshotsStats.jsp | 2 +- .../hbase/client/TestSnapshotFromClient.java | 2 +- .../hbase/client/TestSnapshotWithAcl.java | 243 +++ .../hbase/coprocessor/TestMasterObserver.java | 2 +- .../cleaner/TestSnapshotFromMaster.java | 2 +- .../snapshot/TestSnapshotFileCache.java | 1 - .../hbase/security/access/SecureTestUtil.java | 2 +- .../security/access/TestAccessController.java | 2 +- .../access/TestWithDisabledAuthorization.java | 2 +- .../hbase/snapshot/SnapshotTestingUtils.java | 6 +- .../hbase/snapshot/TestExportSnapshot.java | 2 +- .../snapshot/TestFlushSnapshotFromClient.java | 2 +- .../TestRestoreFlushSnapshotFromClient.java | 2 +- .../snapshot/TestRestoreSnapshotHelper.java | 2 +- .../snapshot/TestSnapshotClientRetries.java | 2 +- .../TestSnapshotDescriptionUtils.java | 5 +- .../hbase/snapshot/TestSnapshotManifest.java | 3 +- hbase-shell/src/main/ruby/hbase.rb | 1 + hbase-shell/src/main/ruby/hbase/admin.rb | 12 +- .../ruby/shell/commands/clone_snapshot.rb | 11 +- .../ruby/shell/commands/restore_snapshot.rb | 10 +- 69 files changed, 2694 insertions(+), 1994 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index 82df3f4a658..5b3744a33ec 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.quotas.QuotaFilter; import org.apache.hadoop.hbase.quotas.QuotaRetriever; import org.apache.hadoop.hbase.quotas.QuotaSettings; @@ -1146,7 +1147,7 @@ public interface Admin extends Abortable, Closeable { @Deprecated void snapshot(final String snapshotName, final TableName tableName, - HBaseProtos.SnapshotDescription.Type type) throws IOException, SnapshotCreationException, + SnapshotDescription.Type type) throws IOException, SnapshotCreationException, IllegalArgumentException; /** @@ -1168,7 +1169,7 @@ public interface Admin extends Abortable, Closeable { * @throws IllegalArgumentException if the snapshot request is formatted incorrectly */ @Deprecated - void snapshot(HBaseProtos.SnapshotDescription snapshot) + void snapshot(SnapshotDescription snapshot) throws IOException, SnapshotCreationException, IllegalArgumentException; /** @@ -1182,7 +1183,7 @@ public interface Admin extends Abortable, Closeable { * @throws IllegalArgumentException if the snapshot request is formatted incorrectly */ @Deprecated - MasterProtos.SnapshotResponse takeSnapshotAsync(HBaseProtos.SnapshotDescription snapshot) + MasterProtos.SnapshotResponse takeSnapshotAsync(SnapshotDescription snapshot) throws IOException, SnapshotCreationException; /** @@ -1202,7 +1203,7 @@ public interface Admin 
extends Abortable, Closeable { * unknown */ @Deprecated - boolean isSnapshotFinished(final HBaseProtos.SnapshotDescription snapshot) + boolean isSnapshotFinished(final SnapshotDescription snapshot) throws IOException, HBaseSnapshotException, UnknownSnapshotException; /** @@ -1268,6 +1269,23 @@ public interface Admin extends Abortable, Closeable { void restoreSnapshot(final String snapshotName, boolean takeFailSafeSnapshot) throws IOException, RestoreSnapshotException; + /** + * Restore the specified snapshot on the original table. (The table must be disabled) If + * 'takeFailSafeSnapshot' is set to true, a snapshot of the current table is taken before + * executing the restore operation. In case of restore failure, the failsafe snapshot will be + * restored. If the restore completes without problem the failsafe snapshot is deleted. The + * failsafe snapshot name is configurable by using the property + * "hbase.snapshot.restore.failsafe.name". + * @param snapshotName name of the snapshot to restore + * @param takeFailSafeSnapshot true if the failsafe snapshot should be taken + * @param restoreAcl true to restore acl of snapshot into table. + * @throws IOException if a remote or network exception occurs + * @throws RestoreSnapshotException if snapshot failed to be restored + * @throws IllegalArgumentException if the restore request is formatted incorrectly + */ + void restoreSnapshot(final String snapshotName, boolean takeFailSafeSnapshot, boolean restoreAcl) + throws IOException, RestoreSnapshotException; + /** * Create a new table by cloning the snapshot content. * @@ -1294,6 +1312,19 @@ public interface Admin extends Abortable, Closeable { void cloneSnapshot(final String snapshotName, final TableName tableName) throws IOException, TableExistsException, RestoreSnapshotException; + /** + * Create a new table by cloning the snapshot content. 
+ * @param snapshotName name of the snapshot to be cloned + * @param tableName name of the table where the snapshot will be restored + * @param restoreAcl true to restore acl of snapshot into newly created table + * @throws IOException if a remote or network exception occurs + * @throws TableExistsException if table to be created already exists + * @throws RestoreSnapshotException if snapshot failed to be cloned + * @throws IllegalArgumentException if the specified table has not a valid name + */ + void cloneSnapshot(final String snapshotName, final TableName tableName, final boolean restoreAcl) + throws IOException, TableExistsException, RestoreSnapshotException; + /** * Execute a distributed procedure on a cluster. * @@ -1342,7 +1373,7 @@ public interface Admin extends Abortable, Closeable { * @throws IOException if a network error occurs */ @Deprecated - List listSnapshots() throws IOException; + List listSnapshots() throws IOException; /** * List all the completed snapshots matching the given regular expression. @@ -1352,7 +1383,7 @@ public interface Admin extends Abortable, Closeable { * @throws IOException if a remote or network exception occurs */ @Deprecated - List listSnapshots(String regex) throws IOException; + List listSnapshots(String regex) throws IOException; /** * List all the completed snapshots matching the given pattern. 
@@ -1362,7 +1393,7 @@ public interface Admin extends Abortable, Closeable { * @throws IOException if a remote or network exception occurs */ @Deprecated - List listSnapshots(Pattern pattern) throws IOException; + List listSnapshots(Pattern pattern) throws IOException; /** * List all the completed snapshots matching the given table name regular expression and snapshot @@ -1373,7 +1404,7 @@ public interface Admin extends Abortable, Closeable { * @throws IOException if a remote or network exception occurs */ @Deprecated - List listTableSnapshots(String tableNameRegex, + List listTableSnapshots(String tableNameRegex, String snapshotNameRegex) throws IOException; /** @@ -1385,7 +1416,7 @@ public interface Admin extends Abortable, Closeable { * @throws IOException if a remote or network exception occurs */ @Deprecated - List listTableSnapshots(Pattern tableNamePattern, + List listTableSnapshots(Pattern tableNamePattern, Pattern snapshotNamePattern) throws IOException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 6aed02778b9..2cbeb9ae5af 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -95,7 +95,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest; @@ 
-157,6 +156,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.quotas.QuotaFilter; import org.apache.hadoop.hbase.quotas.QuotaRetriever; import org.apache.hadoop.hbase.quotas.QuotaSettings; @@ -917,7 +917,6 @@ public class HBaseAdmin implements Admin { * or TimeoutException in case the wait timeout was not long enough to allow the * operation to complete. * - * @param desc table descriptor for table * @param tableName name of table to delete * @throws IOException if a remote or network exception occurs * @return the result of the async delete. You can use Future.get(long, TimeUnit) @@ -3820,23 +3819,21 @@ public class HBaseAdmin implements Admin { } /** - * Restore the specified snapshot on the original table. (The table must be disabled) - * If 'takeFailSafeSnapshot' is set to true, a snapshot of the current table is taken - * before executing the restore operation. - * In case of restore failure, the failsafe snapshot will be restored. - * If the restore completes without problem the failsafe snapshot is deleted. - * - * The failsafe snapshot name is configurable by using the property + * Restore the specified snapshot on the original table. (The table must be disabled) If + * 'takeFailSafeSnapshot' is set to true, a snapshot of the current table is taken before + * executing the restore operation. In case of restore failure, the failsafe snapshot will be + * restored. If the restore completes without problem the failsafe snapshot is deleted. The + * failsafe snapshot name is configurable by using the property * "hbase.snapshot.restore.failsafe.name". 
- * * @param snapshotName name of the snapshot to restore * @param takeFailSafeSnapshot true if the failsafe snapshot should be taken + * @param restoreAcl true to restore acl of snapshot into table. * @throws IOException if a remote or network exception occurs * @throws RestoreSnapshotException if snapshot failed to be restored * @throws IllegalArgumentException if the restore request is formatted incorrectly */ @Override - public void restoreSnapshot(final String snapshotName, boolean takeFailSafeSnapshot) + public void restoreSnapshot(final String snapshotName, boolean takeFailSafeSnapshot, boolean restoreAcl) throws IOException, RestoreSnapshotException { TableName tableName = null; for (SnapshotDescription snapshotInfo: listSnapshots()) { @@ -3853,7 +3850,7 @@ public class HBaseAdmin implements Admin { // The table does not exists, switch to clone. if (!tableExists(tableName)) { - cloneSnapshot(snapshotName, tableName); + cloneSnapshot(snapshotName, tableName, restoreAcl); return; } @@ -3877,13 +3874,13 @@ public class HBaseAdmin implements Admin { try { // Restore snapshot - internalRestoreSnapshot(snapshotName, tableName); + internalRestoreSnapshot(snapshotName, tableName, restoreAcl); } catch (IOException e) { // Somthing went wrong during the restore... // if the pre-restore snapshot is available try to rollback if (takeFailSafeSnapshot) { try { - internalRestoreSnapshot(failSafeSnapshotSnapshotName, tableName); + internalRestoreSnapshot(failSafeSnapshotSnapshotName, tableName, restoreAcl); String msg = "Restore snapshot=" + snapshotName + " failed. 
Rollback to snapshot=" + failSafeSnapshotSnapshotName + " succeeded."; LOG.error(msg, e); @@ -3909,6 +3906,12 @@ public class HBaseAdmin implements Admin { } } + @Override + public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot) + throws IOException, RestoreSnapshotException { + restoreSnapshot(snapshotName, takeFailSafeSnapshot, false); + } + /** * Create a new table by cloning the snapshot content. * @@ -3968,15 +3971,21 @@ public class HBaseAdmin implements Admin { * @throws IllegalArgumentException if the specified table has not a valid name */ @Override - public void cloneSnapshot(final String snapshotName, final TableName tableName) - throws IOException, TableExistsException, RestoreSnapshotException { + public void cloneSnapshot(final String snapshotName, final TableName tableName, + final boolean restoreAcl) throws IOException, TableExistsException, RestoreSnapshotException { if (tableExists(tableName)) { throw new TableExistsException(tableName); } - internalRestoreSnapshot(snapshotName, tableName); + internalRestoreSnapshot(snapshotName, tableName, restoreAcl); waitUntilTableIsEnabled(tableName); } + @Override + public void cloneSnapshot(String snapshotName, TableName tableName) + throws IOException, TableExistsException, RestoreSnapshotException { + cloneSnapshot(snapshotName, tableName, false); + } + /** * Execute a distributed procedure on a cluster synchronously with return data * @@ -4117,23 +4126,23 @@ public class HBaseAdmin implements Admin { } /** - * Execute Restore/Clone snapshot and wait for the server to complete (blocking). - * To check if the cloned table exists, use {@link #isTableAvailable} -- it is not safe to - * create an HTable instance to this table before it is available. + * Execute Restore/Clone snapshot and wait for the server to complete (blocking). 
To check if the + * cloned table exists, use {@link #isTableAvailable} -- it is not safe to create an HTable + * instance to this table before it is available. * @param snapshotName snapshot to restore * @param tableName table name to restore the snapshot on * @throws IOException if a remote or network exception occurs * @throws RestoreSnapshotException if snapshot failed to be restored * @throws IllegalArgumentException if the restore request is formatted incorrectly */ - private void internalRestoreSnapshot(final String snapshotName, final TableName - tableName) + private void internalRestoreSnapshot(final String snapshotName, final TableName tableName, + final boolean restoreAcl) throws IOException, RestoreSnapshotException { SnapshotDescription snapshot = SnapshotDescription.newBuilder() .setName(snapshotName).setTable(tableName.getNameAsString()).build(); // actually restore the snapshot - internalRestoreSnapshotAsync(snapshot); + internalRestoreSnapshotAsync(snapshot, restoreAcl); final IsRestoreSnapshotDoneRequest request = IsRestoreSnapshotDoneRequest.newBuilder() .setSnapshot(snapshot).build(); @@ -4177,12 +4186,12 @@ public class HBaseAdmin implements Admin { * @throws RestoreSnapshotException if snapshot failed to be restored * @throws IllegalArgumentException if the restore request is formatted incorrectly */ - private RestoreSnapshotResponse internalRestoreSnapshotAsync(final SnapshotDescription snapshot) - throws IOException, RestoreSnapshotException { + private RestoreSnapshotResponse internalRestoreSnapshotAsync(final SnapshotDescription snapshot, + final boolean restoreAcl) throws IOException, RestoreSnapshotException { ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(snapshot); - final RestoreSnapshotRequest request = RestoreSnapshotRequest.newBuilder().setSnapshot(snapshot) - .build(); + final RestoreSnapshotRequest request = + RestoreSnapshotRequest.newBuilder().setSnapshot(snapshot).setRestoreACL(restoreAcl).build(); // run the 
snapshot restore on the master return executeCallable(new MasterCallable(getConnection()) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java index 4fb75482c39..499a9bbfb73 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java @@ -157,6 +157,10 @@ public class TablePermission extends Permission { return table; } + public void setTableName(TableName table) { + this.table = table; + } + public boolean hasFamily() { return family != null; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java index d439c8b0b31..686dbb81cbd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.util.Bytes; /** @@ -36,7 +36,7 @@ public class ClientSnapshotDescriptionUtils { * @throws IllegalArgumentException if the name of the snapshot or the name of the table to * snapshot are not valid names. 
*/ - public static void assertSnapshotRequestIsValid(HBaseProtos.SnapshotDescription snapshot) + public static void assertSnapshotRequestIsValid(SnapshotDescription snapshot) throws IllegalArgumentException { // make sure the snapshot name is valid TableName.isLegalTableQualifierName(Bytes.toBytes(snapshot.getName()), true); @@ -52,12 +52,12 @@ public class ClientSnapshotDescriptionUtils { /** * Returns a single line (no \n) representation of snapshot metadata. Use this instead of - * {@link org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription#toString()}. We don't replace SnapshotDescrpition's toString + * {@link SnapshotDescription#toString()}. We don't replace SnapshotDescrpition's toString * because it is auto-generated by protoc. * @param ssd * @return Single line string with a summary of the snapshot parameters */ - public static String toString(HBaseProtos.SnapshotDescription ssd) { + public static String toString(SnapshotDescription ssd) { if (ssd == null) { return null; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java index d29c89cb58e..a1f59115ae9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java index cd2f66fa5cd..a88d8203837 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * General exception base class for when a snapshot fails diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java index 25cd583b03a..8ed42e07e03 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Thrown when a snapshot could not be restored due to a server-side error when restoring it. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java index 324d41f3c6a..d5501010a3d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Thrown when a snapshot could not be created due to a server-side error when diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java index 6ba45bddc92..6006136cae8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java index ff3cdcb87c8..0344aff56ca 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Thrown when a snapshot exists but should not diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index 6385c27dfa4..afa589249b0 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -32,11 +32,11 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 0e914907aac..3c4fb616d42 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -11141,1281 +11141,6 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:hbase.pb.NameInt64Pair) } - public interface SnapshotDescriptionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - /** - * required string name = 1; - */ - boolean hasName(); - /** - * required string name = 1; - */ - java.lang.String getName(); - /** - * required string name = 1; - */ - com.google.protobuf.ByteString - getNameBytes(); - - // optional string table = 2; - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - boolean hasTable(); - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - java.lang.String getTable(); - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - com.google.protobuf.ByteString - getTableBytes(); - - // optional int64 creation_time = 3 [default = 0]; - /** - * optional int64 creation_time = 3 [default = 0]; - */ - boolean hasCreationTime(); - /** - * optional int64 creation_time = 3 [default = 0]; - */ - long getCreationTime(); - - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - boolean hasType(); - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); - - // optional int32 version = 5; - /** - * optional int32 version = 5; - */ - boolean hasVersion(); - /** - * optional int32 version = 5; - */ - int getVersion(); - - // optional string owner = 6; - /** - * optional string owner = 6; - */ - boolean hasOwner(); - /** - * optional string owner = 6; - */ - java.lang.String getOwner(); - /** - * optional string owner = 6; - */ - com.google.protobuf.ByteString - getOwnerBytes(); - } - /** - * Protobuf type {@code hbase.pb.SnapshotDescription} - * - *
-   **
-   * Description of the snapshot to take
-   * 
- */ - public static final class SnapshotDescription extends - com.google.protobuf.GeneratedMessage - implements SnapshotDescriptionOrBuilder { - // Use SnapshotDescription.newBuilder() to construct. - private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SnapshotDescription defaultInstance; - public static SnapshotDescription getDefaultInstance() { - return defaultInstance; - } - - public SnapshotDescription getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SnapshotDescription( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - table_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - creationTime_ = input.readInt64(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - type_ = value; - } - break; - } - case 40: { - bitField0_ |= 0x00000010; - version_ = input.readInt32(); - break; - } - case 50: { - bitField0_ |= 0x00000020; - owner_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SnapshotDescription parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotDescription(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - /** - * Protobuf enum {@code hbase.pb.SnapshotDescription.Type} - */ - public 
enum Type - implements com.google.protobuf.ProtocolMessageEnum { - /** - * DISABLED = 0; - */ - DISABLED(0, 0), - /** - * FLUSH = 1; - */ - FLUSH(1, 1), - /** - * SKIPFLUSH = 2; - */ - SKIPFLUSH(2, 2), - ; - - /** - * DISABLED = 0; - */ - public static final int DISABLED_VALUE = 0; - /** - * FLUSH = 1; - */ - public static final int FLUSH_VALUE = 1; - /** - * SKIPFLUSH = 2; - */ - public static final int SKIPFLUSH_VALUE = 2; - - - public final int getNumber() { return value; } - - public static Type valueOf(int value) { - switch (value) { - case 0: return DISABLED; - case 1: return FLUSH; - case 2: return SKIPFLUSH; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Type findValueByNumber(int number) { - return Type.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0); - } - - private static final Type[] VALUES = values(); - - public static Type valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Type(int index, int value) { - this.index = index; - this.value = value; - } - - // 
@@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type) - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - /** - * required string name = 1; - */ - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string name = 1; - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - name_ = s; - } - return s; - } - } - /** - * required string name = 1; - */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string table = 2; - public static final int TABLE_FIELD_NUMBER = 2; - private java.lang.Object table_; - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - public boolean hasTable() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - public java.lang.String getTable() { - java.lang.Object ref = table_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - table_ = s; - } - return s; - } - } - /** - * optional string table = 2; - * - *
-     * not needed for delete, but checked for in taking snapshot
-     * 
- */ - public com.google.protobuf.ByteString - getTableBytes() { - java.lang.Object ref = table_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - table_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional int64 creation_time = 3 [default = 0]; - public static final int CREATION_TIME_FIELD_NUMBER = 3; - private long creationTime_; - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public boolean hasCreationTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public long getCreationTime() { - return creationTime_; - } - - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - public static final int TYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_; - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public boolean hasType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { - return type_; - } - - // optional int32 version = 5; - public static final int VERSION_FIELD_NUMBER = 5; - private int version_; - /** - * optional int32 version = 5; - */ - public boolean hasVersion() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional int32 version = 5; - */ - public int getVersion() { - return version_; - } - - // optional string owner = 6; - public static final int OWNER_FIELD_NUMBER = 6; - private java.lang.Object owner_; - /** - * optional string owner = 6; - */ - public boolean hasOwner() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - /** - * optional string owner = 6; - */ - 
public java.lang.String getOwner() { - java.lang.Object ref = owner_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - owner_ = s; - } - return s; - } - } - /** - * optional string owner = 6; - */ - public com.google.protobuf.ByteString - getOwnerBytes() { - java.lang.Object ref = owner_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - owner_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - name_ = ""; - table_ = ""; - creationTime_ = 0L; - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; - version_ = 0; - owner_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getTableBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeInt64(3, creationTime_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, type_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeInt32(5, version_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBytes(6, getOwnerBytes()); - } - getUnknownFields().writeTo(output); - } - - private int 
memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getTableBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(3, creationTime_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, type_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(5, version_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, getOwnerBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasTable() == other.hasTable()); - if (hasTable()) { - result = result && getTable() - 
.equals(other.getTable()); - } - result = result && (hasCreationTime() == other.hasCreationTime()); - if (hasCreationTime()) { - result = result && (getCreationTime() - == other.getCreationTime()); - } - result = result && (hasType() == other.hasType()); - if (hasType()) { - result = result && - (getType() == other.getType()); - } - result = result && (hasVersion() == other.hasVersion()); - if (hasVersion()) { - result = result && (getVersion() - == other.getVersion()); - } - result = result && (hasOwner() == other.hasOwner()); - if (hasOwner()) { - result = result && getOwner() - .equals(other.getOwner()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasTable()) { - hash = (37 * hash) + TABLE_FIELD_NUMBER; - hash = (53 * hash) + getTable().hashCode(); - } - if (hasCreationTime()) { - hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCreationTime()); - } - if (hasType()) { - hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getType()); - } - if (hasVersion()) { - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion(); - } - if (hasOwner()) { - hash = (37 * hash) + OWNER_FIELD_NUMBER; - hash = (53 * hash) + getOwner().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.SnapshotDescription} - * - *
-     **
-     * Description of the snapshot to take
-     * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - table_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - creationTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; - bitField0_ = (bitField0_ & ~0x00000008); - version_ = 0; - bitField0_ = (bitField0_ & ~0x00000010); - owner_ = ""; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - 
getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.table_ = table_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.creationTime_ = creationTime_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.type_ = type_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.version_ = version_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.owner_ = owner_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this; - if (other.hasName()) { - bitField0_ |= 0x00000001; - name_ = other.name_; - onChanged(); - } - if (other.hasTable()) { - bitField0_ |= 0x00000002; - table_ = other.table_; - onChanged(); - } - if (other.hasCreationTime()) { - setCreationTime(other.getCreationTime()); - } - if (other.hasType()) { - setType(other.getType()); - } - if (other.hasVersion()) { - setVersion(other.getVersion()); - } - if (other.hasOwner()) { - bitField0_ |= 0x00000020; - owner_ = other.owner_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - /** - * required string name = 1; - */ - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string name = 1; - */ - public java.lang.String 
getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - name_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string name = 1; - */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string name = 1; - */ - public Builder setName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - /** - * required string name = 1; - */ - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - /** - * required string name = 1; - */ - public Builder setNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - - // optional string table = 2; - private java.lang.Object table_ = ""; - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public boolean hasTable() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public java.lang.String getTable() { - java.lang.Object ref = table_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - table_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public com.google.protobuf.ByteString - getTableBytes() { - java.lang.Object ref = table_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - table_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public Builder setTable( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - table_ = value; - onChanged(); - return this; - } - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public Builder clearTable() { - bitField0_ = (bitField0_ & ~0x00000002); - table_ = getDefaultInstance().getTable(); - onChanged(); - return this; - } - /** - * optional string table = 2; - * - *
-       * not needed for delete, but checked for in taking snapshot
-       * 
- */ - public Builder setTableBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - table_ = value; - onChanged(); - return this; - } - - // optional int64 creation_time = 3 [default = 0]; - private long creationTime_ ; - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public boolean hasCreationTime() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public long getCreationTime() { - return creationTime_; - } - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public Builder setCreationTime(long value) { - bitField0_ |= 0x00000004; - creationTime_ = value; - onChanged(); - return this; - } - /** - * optional int64 creation_time = 3 [default = 0]; - */ - public Builder clearCreationTime() { - bitField0_ = (bitField0_ & ~0x00000004); - creationTime_ = 0L; - onChanged(); - return this; - } - - // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public boolean hasType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { - return type_; - } - /** - * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - type_ = value; - onChanged(); - return this; - } - /** - * optional 
.hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; - */ - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000008); - type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; - onChanged(); - return this; - } - - // optional int32 version = 5; - private int version_ ; - /** - * optional int32 version = 5; - */ - public boolean hasVersion() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional int32 version = 5; - */ - public int getVersion() { - return version_; - } - /** - * optional int32 version = 5; - */ - public Builder setVersion(int value) { - bitField0_ |= 0x00000010; - version_ = value; - onChanged(); - return this; - } - /** - * optional int32 version = 5; - */ - public Builder clearVersion() { - bitField0_ = (bitField0_ & ~0x00000010); - version_ = 0; - onChanged(); - return this; - } - - // optional string owner = 6; - private java.lang.Object owner_ = ""; - /** - * optional string owner = 6; - */ - public boolean hasOwner() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - /** - * optional string owner = 6; - */ - public java.lang.String getOwner() { - java.lang.Object ref = owner_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - owner_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string owner = 6; - */ - public com.google.protobuf.ByteString - getOwnerBytes() { - java.lang.Object ref = owner_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - owner_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string owner = 6; - */ - public Builder setOwner( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - owner_ = value; - 
onChanged(); - return this; - } - /** - * optional string owner = 6; - */ - public Builder clearOwner() { - bitField0_ = (bitField0_ & ~0x00000020); - owner_ = getDefaultInstance().getOwner(); - onChanged(); - return this; - } - /** - * optional string owner = 6; - */ - public Builder setOwnerBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - owner_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription) - } - - static { - defaultInstance = new SnapshotDescription(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription) - } - public interface ProcedureDescriptionOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -19003,11 +17728,6 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_SnapshotDescription_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_ProcedureDescription_descriptor; private static @@ -19092,36 +17812,30 @@ public final class HBaseProtos { "\002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005va" + "lue\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005first\030\001 \002" + "(\014\022\016\n\006second\030\002 \002(\014\",\n\rNameInt64Pair\022\014\n\004n", - "ame\030\001 \001(\t\022\r\n\005value\030\002 \001(\003\"\325\001\n\023SnapshotDes" + - "cription\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\030\n" + - "\rcreation_time\030\003 \001(\003:\0010\0227\n\004type\030\004 \001(\0162\"." 
+ - "hbase.pb.SnapshotDescription.Type:\005FLUSH" + - "\022\017\n\007version\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\".\n\004Type" + - "\022\014\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002" + - "\"\206\001\n\024ProcedureDescription\022\021\n\tsignature\030\001" + - " \002(\t\022\020\n\010instance\030\002 \001(\t\022\030\n\rcreation_time\030" + - "\003 \001(\003:\0010\022/\n\rconfiguration\030\004 \003(\0132\030.hbase." + - "pb.NameStringPair\"\n\n\010EmptyMsg\"\033\n\007LongMsg", - "\022\020\n\010long_msg\030\001 \002(\003\"\037\n\tDoubleMsg\022\022\n\ndoubl" + - "e_msg\030\001 \002(\001\"\'\n\rBigDecimalMsg\022\026\n\016bigdecim" + - "al_msg\030\001 \002(\014\"5\n\004UUID\022\026\n\016least_sig_bits\030\001" + - " \002(\004\022\025\n\rmost_sig_bits\030\002 \002(\004\"T\n\023Namespace" + - "Descriptor\022\014\n\004name\030\001 \002(\014\022/\n\rconfiguratio" + - "n\030\002 \003(\0132\030.hbase.pb.NameStringPair\"\235\001\n\013Ve" + - "rsionInfo\022\017\n\007version\030\001 \002(\t\022\013\n\003url\030\002 \002(\t\022" + - "\020\n\010revision\030\003 \002(\t\022\014\n\004user\030\004 \002(\t\022\014\n\004date\030" + - "\005 \002(\t\022\024\n\014src_checksum\030\006 \002(\t\022\025\n\rversion_m" + - "ajor\030\007 \001(\r\022\025\n\rversion_minor\030\010 \001(\r\"Q\n\020Reg", - "ionServerInfo\022\020\n\010infoPort\030\001 \001(\005\022+\n\014versi" + - "on_info\030\002 \001(\0132\025.hbase.pb.VersionInfo*r\n\013" + - "CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001" + - "\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR" + - "_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*n\n\010Time" + - "Unit\022\017\n\013NANOSECONDS\020\001\022\020\n\014MICROSECONDS\020\002\022" + - "\020\n\014MILLISECONDS\020\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTE" + - "S\020\005\022\t\n\005HOURS\020\006\022\010\n\004DAYS\020\007B>\n*org.apache.h" + - 
"adoop.hbase.protobuf.generatedB\013HBasePro" + - "tosH\001\240\001\001" + "ame\030\001 \001(\t\022\r\n\005value\030\002 \001(\003\"\206\001\n\024ProcedureDe" + + "scription\022\021\n\tsignature\030\001 \002(\t\022\020\n\010instance" + + "\030\002 \001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\022/\n\rconf" + + "iguration\030\004 \003(\0132\030.hbase.pb.NameStringPai" + + "r\"\n\n\010EmptyMsg\"\033\n\007LongMsg\022\020\n\010long_msg\030\001 \002" + + "(\003\"\037\n\tDoubleMsg\022\022\n\ndouble_msg\030\001 \002(\001\"\'\n\rB" + + "igDecimalMsg\022\026\n\016bigdecimal_msg\030\001 \002(\014\"5\n\004" + + "UUID\022\026\n\016least_sig_bits\030\001 \002(\004\022\025\n\rmost_sig" + + "_bits\030\002 \002(\004\"T\n\023NamespaceDescriptor\022\014\n\004na" + + "me\030\001 \002(\014\022/\n\rconfiguration\030\002 \003(\0132\030.hbase.", + "pb.NameStringPair\"\235\001\n\013VersionInfo\022\017\n\007ver" + + "sion\030\001 \002(\t\022\013\n\003url\030\002 \002(\t\022\020\n\010revision\030\003 \002(" + + "\t\022\014\n\004user\030\004 \002(\t\022\014\n\004date\030\005 \002(\t\022\024\n\014src_che" + + "cksum\030\006 \002(\t\022\025\n\rversion_major\030\007 \001(\r\022\025\n\rve" + + "rsion_minor\030\010 \001(\r\"Q\n\020RegionServerInfo\022\020\n" + + "\010infoPort\030\001 \001(\005\022+\n\014version_info\030\002 \001(\0132\025." 
+ + "hbase.pb.VersionInfo*r\n\013CompareType\022\010\n\004L" + + "ESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tN" + + "OT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREA" + + "TER\020\005\022\t\n\005NO_OP\020\006*n\n\010TimeUnit\022\017\n\013NANOSECO", + "NDS\020\001\022\020\n\014MICROSECONDS\020\002\022\020\n\014MILLISECONDS\020" + + "\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOURS\020\006\022\010" + + "\n\004DAYS\020\007B>\n*org.apache.hadoop.hbase.prot" + + "obuf.generatedB\013HBaseProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -19212,62 +17926,56 @@ public final class HBaseProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_NameInt64Pair_descriptor, new java.lang.String[] { "Name", "Value", }); - internal_static_hbase_pb_SnapshotDescription_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SnapshotDescription_descriptor, - new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", }); internal_static_hbase_pb_ProcedureDescription_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(14); internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ProcedureDescription_descriptor, new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", }); internal_static_hbase_pb_EmptyMsg_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(15); internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_EmptyMsg_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_LongMsg_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(16); internal_static_hbase_pb_LongMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_LongMsg_descriptor, new java.lang.String[] { "LongMsg", }); internal_static_hbase_pb_DoubleMsg_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(17); internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_DoubleMsg_descriptor, new java.lang.String[] { "DoubleMsg", }); internal_static_hbase_pb_BigDecimalMsg_descriptor = - getDescriptor().getMessageTypes().get(19); + getDescriptor().getMessageTypes().get(18); internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_BigDecimalMsg_descriptor, new java.lang.String[] { "BigdecimalMsg", }); internal_static_hbase_pb_UUID_descriptor = - getDescriptor().getMessageTypes().get(20); + getDescriptor().getMessageTypes().get(19); internal_static_hbase_pb_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_UUID_descriptor, new java.lang.String[] { "LeastSigBits", "MostSigBits", }); internal_static_hbase_pb_NamespaceDescriptor_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(20); internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_NamespaceDescriptor_descriptor, new java.lang.String[] { "Name", "Configuration", }); internal_static_hbase_pb_VersionInfo_descriptor = - getDescriptor().getMessageTypes().get(22); 
+ getDescriptor().getMessageTypes().get(21); internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_VersionInfo_descriptor, new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", }); internal_static_hbase_pb_RegionServerInfo_descriptor = - getDescriptor().getMessageTypes().get(23); + getDescriptor().getMessageTypes().get(22); internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RegionServerInfo_descriptor, diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index edf2e855e44..2661dc1c499 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -38275,11 +38275,11 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code hbase.pb.SnapshotRequest} @@ -38333,11 +38333,11 @@ public final class MasterProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -38387,7 +38387,7 @@ public final class MasterProtos { private int bitField0_; // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -38397,18 +38397,18 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean 
isInitialized() { @@ -38602,7 +38602,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } @@ -38698,9 +38698,9 @@ public final class MasterProtos { private int bitField0_; // required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -38710,7 +38710,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -38720,7 +38720,7 @@ 
public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -38737,7 +38737,7 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -38750,12 +38750,12 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -38771,7 +38771,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -38782,7 +38782,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -38790,7 +38790,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -38801,11 +38801,11 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -39612,12 +39612,12 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - java.util.List + java.util.List getSnapshotsList(); /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index); /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ @@ -39625,12 +39625,12 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - java.util.List + java.util.List getSnapshotsOrBuilderList(); /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index); } /** @@ -39686,10 +39686,10 @@ public final class MasterProtos { } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - snapshots_ = new java.util.ArrayList(); + snapshots_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } - snapshots_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry)); + 
snapshots_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry)); break; } } @@ -39736,17 +39736,17 @@ public final class MasterProtos { // repeated .hbase.pb.SnapshotDescription snapshots = 1; public static final int SNAPSHOTS_FIELD_NUMBER = 1; - private java.util.List snapshots_; + private java.util.List snapshots_; /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public java.util.List getSnapshotsList() { + public java.util.List getSnapshotsList() { return snapshots_; } /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public java.util.List + public java.util.List getSnapshotsOrBuilderList() { return snapshots_; } @@ -39759,13 +39759,13 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index) { return snapshots_.get(index); } /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { return snapshots_.get(index); } @@ -40076,22 +40076,22 @@ public final class MasterProtos { private int bitField0_; // repeated .hbase.pb.SnapshotDescription snapshots = 1; - private java.util.List snapshots_ = + private java.util.List snapshots_ = java.util.Collections.emptyList(); private void ensureSnapshotsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - snapshots_ = new java.util.ArrayList(snapshots_); + snapshots_ = new java.util.ArrayList(snapshots_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_; /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public java.util.List getSnapshotsList() { + public java.util.List getSnapshotsList() { if (snapshotsBuilder_ == null) { return java.util.Collections.unmodifiableList(snapshots_); } else { @@ -40111,7 +40111,7 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index) { if (snapshotsBuilder_ == null) { return snapshots_.get(index); } else { @@ -40122,7 +40122,7 @@ public final class MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder setSnapshots( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -40139,7 +40139,7 @@ public final class MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder setSnapshots( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder 
builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.set(index, builderForValue.build()); @@ -40152,7 +40152,7 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public Builder addSnapshots(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder addSnapshots(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -40169,7 +40169,7 @@ public final class MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder addSnapshots( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotsBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -40186,7 +40186,7 @@ public final class MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder addSnapshots( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.add(builderForValue.build()); @@ -40200,7 +40200,7 @@ public final class MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder addSnapshots( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); snapshots_.add(index, builderForValue.build()); @@ -40214,7 +40214,7 @@ public final class 
MasterProtos { * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ public Builder addAllSnapshots( - java.lang.Iterable values) { + java.lang.Iterable values) { if (snapshotsBuilder_ == null) { ensureSnapshotsIsMutable(); super.addAll(values, snapshots_); @@ -40253,14 +40253,14 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotsBuilder( + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().getBuilder(index); } /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder( int index) { if (snapshotsBuilder_ == null) { return snapshots_.get(index); } else { @@ -40270,7 +40270,7 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public java.util.List + public java.util.List getSnapshotsOrBuilderList() { if (snapshotsBuilder_ != null) { return snapshotsBuilder_.getMessageOrBuilderList(); @@ -40281,31 +40281,31 @@ public final class MasterProtos { /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder addSnapshotsBuilder() { return getSnapshotsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()); } /** * repeated 
.hbase.pb.SnapshotDescription snapshots = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder( + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder addSnapshotsBuilder( int index) { return getSnapshotsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()); + index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()); } /** * repeated .hbase.pb.SnapshotDescription snapshots = 1; */ - public java.util.List + public java.util.List getSnapshotsBuilderList() { return getSnapshotsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotsFieldBuilder() { if (snapshotsBuilder_ == null) { snapshotsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshots_, ((bitField0_ & 0x00000001) == 0x00000001), 
getParentForChildren(), @@ -40337,11 +40337,11 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code hbase.pb.DeleteSnapshotRequest} @@ -40395,11 +40395,11 @@ public final class MasterProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -40449,7 +40449,7 @@ public final class MasterProtos { private int bitField0_; // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -40459,18 +40459,18 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - 
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -40664,7 +40664,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } @@ -40760,9 +40760,9 @@ public final class MasterProtos { private int bitField0_; // required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -40772,7 +40772,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -40782,7 +40782,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -40799,7 +40799,7 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -40812,12 +40812,12 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder 
mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -40833,7 +40833,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -40844,7 +40844,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -40852,7 +40852,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -40863,11 +40863,11 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -41236,11 +41236,21 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + + // optional bool restoreACL = 2 [default = false]; + /** + * optional bool restoreACL = 2 [default = false]; + */ + boolean hasRestoreACL(); + /** + * optional bool restoreACL = 2 [default = false]; + */ + boolean getRestoreACL(); } /** * Protobuf type {@code hbase.pb.RestoreSnapshotRequest} @@ -41294,11 +41304,11 @@ public final class MasterProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -41306,6 +41316,11 @@ public final class MasterProtos { bitField0_ |= 0x00000001; break; } + case 16: { + bitField0_ |= 0x00000002; + restoreACL_ = input.readBool(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -41348,7 +41363,7 @@ public final class MasterProtos { private int bitField0_; // required .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -41358,18 +41373,35 @@ public 
final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } + // optional bool restoreACL = 2 [default = false]; + public static final int RESTOREACL_FIELD_NUMBER = 2; + private boolean restoreACL_; + /** + * optional bool restoreACL = 2 [default = false]; + */ + public boolean hasRestoreACL() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool restoreACL = 2 [default = false]; + */ + public boolean getRestoreACL() { + return restoreACL_; + } + private void initFields() { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); + restoreACL_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -41394,6 +41426,9 @@ public final class MasterProtos { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, snapshot_); } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, restoreACL_); + } getUnknownFields().writeTo(output); } @@ -41407,6 +41442,10 @@ public final class MasterProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, snapshot_); } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, restoreACL_); + } size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -41435,6 +41474,11 @@ public final class MasterProtos { result = result && getSnapshot() .equals(other.getSnapshot()); } + result = result && (hasRestoreACL() == other.hasRestoreACL()); + if (hasRestoreACL()) { + result = result && (getRestoreACL() + == other.getRestoreACL()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -41452,6 +41496,10 @@ public final class MasterProtos { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } + if (hasRestoreACL()) { + hash = (37 * hash) + RESTOREACL_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getRestoreACL()); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -41563,11 +41611,13 @@ public final class MasterProtos { public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); + restoreACL_ = false; + bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -41604,6 +41654,10 @@ public final class MasterProtos { } else { result.snapshot_ = snapshotBuilder_.build(); } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.restoreACL_ = restoreACL_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -41623,6 +41677,9 @@ public final class MasterProtos { if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } + if (other.hasRestoreACL()) { + setRestoreACL(other.getRestoreACL()); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -41659,9 +41716,9 @@ public final class MasterProtos { private int bitField0_; // 
required .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -41671,7 +41728,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -41681,7 +41738,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -41698,7 +41755,7 @@ public final class MasterProtos { * required 
.hbase.pb.SnapshotDescription snapshot = 1; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -41711,12 +41768,12 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -41732,7 +41789,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -41743,7 +41800,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -41751,7 +41808,7 @@ public final class MasterProtos { /** * required .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -41762,11 +41819,11 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -41775,6 
+41832,39 @@ public final class MasterProtos { return snapshotBuilder_; } + // optional bool restoreACL = 2 [default = false]; + private boolean restoreACL_ ; + /** + * optional bool restoreACL = 2 [default = false]; + */ + public boolean hasRestoreACL() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool restoreACL = 2 [default = false]; + */ + public boolean getRestoreACL() { + return restoreACL_; + } + /** + * optional bool restoreACL = 2 [default = false]; + */ + public Builder setRestoreACL(boolean value) { + bitField0_ |= 0x00000002; + restoreACL_ = value; + onChanged(); + return this; + } + /** + * optional bool restoreACL = 2 [default = false]; + */ + public Builder clearRestoreACL() { + bitField0_ = (bitField0_ & ~0x00000002); + restoreACL_ = false; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RestoreSnapshotRequest) } @@ -42135,11 +42225,11 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code hbase.pb.IsSnapshotDoneRequest} @@ -42198,11 +42288,11 @@ public final class MasterProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -42252,7 +42342,7 @@ public final class MasterProtos { private int bitField0_; // optional .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -42262,18 +42352,18 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -42470,7 +42560,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } @@ -42564,9 +42654,9 @@ public final class MasterProtos { private int bitField0_; // optional .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -42576,7 +42666,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -42586,7 +42676,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder 
setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -42603,7 +42693,7 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -42616,12 +42706,12 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -42637,7 +42727,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -42648,7 +42738,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -42656,7 +42746,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -42667,11 +42757,11 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -42712,11 +42802,11 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code hbase.pb.IsSnapshotDoneResponse} @@ -42775,11 +42865,11 @@ public final class MasterProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -42845,7 +42935,7 @@ public final class MasterProtos { // optional .hbase.pb.SnapshotDescription snapshot = 2; public static final int SNAPSHOT_FIELD_NUMBER = 2; - private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ @@ -42855,19 +42945,19 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { done_ = false; - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -43077,7 +43167,7 @@ public final class MasterProtos { done_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } @@ -43211,9 +43301,9 @@ public final class MasterProtos { } // optional .hbase.pb.SnapshotDescription snapshot = 2; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ @@ -43223,7 +43313,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -43233,7 +43323,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -43250,7 +43340,7 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 2; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder 
builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -43263,12 +43353,12 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && - snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -43284,7 +43374,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -43295,7 +43385,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 2; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -43303,7 +43393,7 @@ public final class MasterProtos { /** * optional 
.hbase.pb.SnapshotDescription snapshot = 2; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -43314,11 +43404,11 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 2; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -43349,11 +43439,11 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot(); + 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot(); /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneRequest} @@ -43407,11 +43497,11 @@ public final class MasterProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = snapshot_.toBuilder(); } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(snapshot_); snapshot_ = subBuilder.buildPartial(); @@ -43461,7 +43551,7 @@ public final class MasterProtos { private int bitField0_; // optional .hbase.pb.SnapshotDescription snapshot = 1; public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -43471,18 +43561,18 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { return snapshot_; } /** * 
optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { return snapshot_; } private void initFields() { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -43674,7 +43764,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); } else { snapshotBuilder_.clear(); } @@ -43768,9 +43858,9 @@ public final class MasterProtos { private int bitField0_; // optional .hbase.pb.SnapshotDescription snapshot = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ @@ -43780,7 +43870,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_; } else { @@ -43790,7 +43880,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -43807,7 +43897,7 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ public Builder setSnapshot( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); onChanged(); @@ -43820,12 +43910,12 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) { + public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ != org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) { snapshot_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); } else { snapshot_ = value; } @@ -43841,7 +43931,7 @@ public final class MasterProtos { */ public Builder clearSnapshot() { if (snapshotBuilder_ == null) { - snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); + snapshot_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); onChanged(); } else { snapshotBuilder_.clear(); @@ -43852,7 +43942,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSnapshotFieldBuilder().getBuilder(); @@ -43860,7 +43950,7 @@ public final class MasterProtos { /** * optional .hbase.pb.SnapshotDescription snapshot = 1; */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { @@ -43871,11 +43961,11 @@ public final class MasterProtos { * optional .hbase.pb.SnapshotDescription snapshot = 1; */ private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>( snapshot_, getParentForChildren(), isClean()); @@ -66093,338 +66183,339 @@ public final class MasterProtos { "\n\014Master.proto\022\010hbase.pb\032\013HBase.proto\032\014C" + "lient.proto\032\023ClusterStatus.proto\032\023ErrorH" + "andling.proto\032\017Procedure.proto\032\013Quota.pr" + - "oto\"\234\001\n\020AddColumnRequest\022\'\n\ntable_name\030\001" + - " \002(\0132\023.hbase.pb.TableName\0225\n\017column_fami" + - "lies\030\002 \002(\0132\034.hbase.pb.ColumnFamilySchema" + - "\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:" + - "\0010\"\023\n\021AddColumnResponse\"}\n\023DeleteColumnR" + - "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" + - "bleName\022\023\n\013column_name\030\002 \002(\014\022\026\n\013nonce_gr", - "oup\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 
\001(\004:\0010\"\026\n\024Delete" + - "ColumnResponse\"\237\001\n\023ModifyColumnRequest\022\'" + - "\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableName\022" + - "5\n\017column_families\030\002 \002(\0132\034.hbase.pb.Colu" + - "mnFamilySchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020" + - "\n\005nonce\030\004 \001(\004:\0010\"\026\n\024ModifyColumnResponse" + - "\"n\n\021MoveRegionRequest\022)\n\006region\030\001 \002(\0132\031." + - "hbase.pb.RegionSpecifier\022.\n\020dest_server_" + - "name\030\002 \001(\0132\024.hbase.pb.ServerName\"\024\n\022Move" + - "RegionResponse\"\222\001\n\035DispatchMergingRegion", - "sRequest\022+\n\010region_a\030\001 \002(\0132\031.hbase.pb.Re" + - "gionSpecifier\022+\n\010region_b\030\002 \002(\0132\031.hbase." + - "pb.RegionSpecifier\022\027\n\010forcible\030\003 \001(\010:\005fa" + - "lse\" \n\036DispatchMergingRegionsResponse\"@\n" + - "\023AssignRegionRequest\022)\n\006region\030\001 \002(\0132\031.h" + - "base.pb.RegionSpecifier\"\026\n\024AssignRegionR" + - "esponse\"X\n\025UnassignRegionRequest\022)\n\006regi" + - "on\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\024\n\005f" + - "orce\030\002 \001(\010:\005false\"\030\n\026UnassignRegionRespo" + - "nse\"A\n\024OfflineRegionRequest\022)\n\006region\030\001 ", - "\002(\0132\031.hbase.pb.RegionSpecifier\"\027\n\025Offlin" + - "eRegionResponse\"\177\n\022CreateTableRequest\022+\n" + - "\014table_schema\030\001 \002(\0132\025.hbase.pb.TableSche" + - "ma\022\022\n\nsplit_keys\030\002 \003(\014\022\026\n\013nonce_group\030\003 " + - "\001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTableR" + - "esponse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022DeleteTableR" + - "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" + - "bleName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce" + - "\030\003 \001(\004:\0010\"&\n\023DeleteTableResponse\022\017\n\007proc" + - "_id\030\001 
\001(\004\"\207\001\n\024TruncateTableRequest\022&\n\tta", - "bleName\030\001 \002(\0132\023.hbase.pb.TableName\022\035\n\016pr" + - "eserveSplits\030\002 \001(\010:\005false\022\026\n\013nonce_group" + - "\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\027\n\025TruncateT" + - "ableResponse\"g\n\022EnableTableRequest\022\'\n\nta" + - "ble_name\030\001 \002(\0132\023.hbase.pb.TableName\022\026\n\013n" + - "once_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n" + - "\023EnableTableResponse\022\017\n\007proc_id\030\001 \001(\004\"h\n" + - "\023DisableTableRequest\022\'\n\ntable_name\030\001 \002(\013" + - "2\023.hbase.pb.TableName\022\026\n\013nonce_group\030\002 \001" + - "(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\'\n\024DisableTableR", - "esponse\022\017\n\007proc_id\030\001 \001(\004\"\224\001\n\022ModifyTable" + - "Request\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.T" + - "ableName\022+\n\014table_schema\030\002 \002(\0132\025.hbase.p" + - "b.TableSchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n" + - "\005nonce\030\004 \001(\004:\0010\"\025\n\023ModifyTableResponse\"~" + - "\n\026CreateNamespaceRequest\022:\n\023namespaceDes" + - "criptor\030\001 \002(\0132\035.hbase.pb.NamespaceDescri" + - "ptor\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 " + - "\001(\004:\0010\"\031\n\027CreateNamespaceResponse\"Y\n\026Del" + - "eteNamespaceRequest\022\025\n\rnamespaceName\030\001 \002", - "(\t\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(" + - "\004:\0010\"\031\n\027DeleteNamespaceResponse\"~\n\026Modif" + - "yNamespaceRequest\022:\n\023namespaceDescriptor" + - "\030\001 \002(\0132\035.hbase.pb.NamespaceDescriptor\022\026\n" + - "\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"" + - "\031\n\027ModifyNamespaceResponse\"6\n\035GetNamespa" + - 
"ceDescriptorRequest\022\025\n\rnamespaceName\030\001 \002" + - "(\t\"\\\n\036GetNamespaceDescriptorResponse\022:\n\023" + - "namespaceDescriptor\030\001 \002(\0132\035.hbase.pb.Nam" + - "espaceDescriptor\"!\n\037ListNamespaceDescrip", - "torsRequest\"^\n ListNamespaceDescriptorsR" + - "esponse\022:\n\023namespaceDescriptor\030\001 \003(\0132\035.h" + - "base.pb.NamespaceDescriptor\"?\n&ListTable" + - "DescriptorsByNamespaceRequest\022\025\n\rnamespa" + - "ceName\030\001 \002(\t\"U\n\'ListTableDescriptorsByNa" + - "mespaceResponse\022*\n\013tableSchema\030\001 \003(\0132\025.h" + - "base.pb.TableSchema\"9\n ListTableNamesByN" + - "amespaceRequest\022\025\n\rnamespaceName\030\001 \002(\t\"K" + - "\n!ListTableNamesByNamespaceResponse\022&\n\tt" + - "ableName\030\001 \003(\0132\023.hbase.pb.TableName\"\021\n\017S", - "hutdownRequest\"\022\n\020ShutdownResponse\"\023\n\021St" + - "opMasterRequest\"\024\n\022StopMasterResponse\"\034\n" + - "\032IsInMaintenanceModeRequest\"8\n\033IsInMaint" + - "enanceModeResponse\022\031\n\021inMaintenanceMode\030" + - "\001 \002(\010\"\037\n\016BalanceRequest\022\r\n\005force\030\001 \001(\010\"\'" + - "\n\017BalanceResponse\022\024\n\014balancer_ran\030\001 \002(\010\"" + - "<\n\031SetBalancerRunningRequest\022\n\n\002on\030\001 \002(\010" + - "\022\023\n\013synchronous\030\002 \001(\010\"8\n\032SetBalancerRunn" + - "ingResponse\022\032\n\022prev_balance_value\030\001 \001(\010\"" + - "\032\n\030IsBalancerEnabledRequest\",\n\031IsBalance", - "rEnabledResponse\022\017\n\007enabled\030\001 \002(\010\"w\n\035Set" + - "SplitOrMergeEnabledRequest\022\017\n\007enabled\030\001 " + - "\002(\010\022\023\n\013synchronous\030\002 \001(\010\0220\n\014switch_types" + - "\030\003 \003(\0162\032.hbase.pb.MasterSwitchType\"4\n\036Se" + - "tSplitOrMergeEnabledResponse\022\022\n\nprev_val" + - "ue\030\001 \003(\010\"O\n\034IsSplitOrMergeEnabledRequest" + - "\022/\n\013switch_type\030\001 \002(\0162\032.hbase.pb.MasterS" + - 
"witchType\"0\n\035IsSplitOrMergeEnabledRespon" + - "se\022\017\n\007enabled\030\001 \002(\010\"\022\n\020NormalizeRequest\"" + - "+\n\021NormalizeResponse\022\026\n\016normalizer_ran\030\001", - " \002(\010\")\n\033SetNormalizerRunningRequest\022\n\n\002o" + - "n\030\001 \002(\010\"=\n\034SetNormalizerRunningResponse\022" + - "\035\n\025prev_normalizer_value\030\001 \001(\010\"\034\n\032IsNorm" + - "alizerEnabledRequest\".\n\033IsNormalizerEnab" + - "ledResponse\022\017\n\007enabled\030\001 \002(\010\"\027\n\025RunCatal" + - "ogScanRequest\"-\n\026RunCatalogScanResponse\022" + - "\023\n\013scan_result\030\001 \001(\005\"-\n\033EnableCatalogJan" + - "itorRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034EnableCat" + - "alogJanitorResponse\022\022\n\nprev_value\030\001 \001(\010\"" + - " \n\036IsCatalogJanitorEnabledRequest\"0\n\037IsC", - "atalogJanitorEnabledResponse\022\r\n\005value\030\001 " + - "\002(\010\"\030\n\026RunCleanerChoreRequest\"4\n\027RunClea" + - "nerChoreResponse\022\031\n\021cleaner_chore_ran\030\001 " + - "\002(\010\"+\n\035SetCleanerChoreRunningRequest\022\n\n\002" + - "on\030\001 \002(\010\"4\n\036SetCleanerChoreRunningRespon" + - "se\022\022\n\nprev_value\030\001 \001(\010\"\036\n\034IsCleanerChore" + - "EnabledRequest\".\n\035IsCleanerChoreEnabledR" + - "esponse\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotReques" + - "t\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotD" + - "escription\",\n\020SnapshotResponse\022\030\n\020expect", - "ed_timeout\030\001 \002(\003\"\036\n\034GetCompletedSnapshot" + - "sRequest\"Q\n\035GetCompletedSnapshotsRespons" + - "e\0220\n\tsnapshots\030\001 \003(\0132\035.hbase.pb.Snapshot" + - "Description\"H\n\025DeleteSnapshotRequest\022/\n\010" + - "snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescri" + - "ption\"\030\n\026DeleteSnapshotResponse\"I\n\026Resto" + - "reSnapshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hb" + - "ase.pb.SnapshotDescription\"\031\n\027RestoreSna" + - 
"pshotResponse\"H\n\025IsSnapshotDoneRequest\022/" + - "\n\010snapshot\030\001 \001(\0132\035.hbase.pb.SnapshotDesc", - "ription\"^\n\026IsSnapshotDoneResponse\022\023\n\004don" + - "e\030\001 \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hbase" + - ".pb.SnapshotDescription\"O\n\034IsRestoreSnap" + - "shotDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.hbas" + - "e.pb.SnapshotDescription\"4\n\035IsRestoreSna" + - "pshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\"F" + - "\n\033GetSchemaAlterStatusRequest\022\'\n\ntable_n" + - "ame\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034GetSch" + - "emaAlterStatusResponse\022\035\n\025yet_to_update_" + - "regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n", - "\032GetTableDescriptorsRequest\022(\n\013table_nam" + - "es\030\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005regex\030\002" + - " \001(\t\022!\n\022include_sys_tables\030\003 \001(\010:\005false\022" + - "\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTableDescriptor" + - "sResponse\022+\n\014table_schema\030\001 \003(\0132\025.hbase." 
+ - "pb.TableSchema\"[\n\024GetTableNamesRequest\022\r" + - "\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(" + - "\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTableN" + - "amesResponse\022(\n\013table_names\030\001 \003(\0132\023.hbas" + - "e.pb.TableName\"\031\n\027GetClusterStatusReques", - "t\"K\n\030GetClusterStatusResponse\022/\n\016cluster" + - "_status\030\001 \002(\0132\027.hbase.pb.ClusterStatus\"\030" + - "\n\026IsMasterRunningRequest\"4\n\027IsMasterRunn" + - "ingResponse\022\031\n\021is_master_running\030\001 \002(\010\"I" + - "\n\024ExecProcedureRequest\0221\n\tprocedure\030\001 \002(" + - "\0132\036.hbase.pb.ProcedureDescription\"F\n\025Exe" + - "cProcedureResponse\022\030\n\020expected_timeout\030\001" + - " \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsProcedure" + - "DoneRequest\0221\n\tprocedure\030\001 \001(\0132\036.hbase.p" + - "b.ProcedureDescription\"`\n\027IsProcedureDon", - "eResponse\022\023\n\004done\030\001 \001(\010:\005false\0220\n\010snapsh" + - "ot\030\002 \001(\0132\036.hbase.pb.ProcedureDescription" + - "\",\n\031GetProcedureResultRequest\022\017\n\007proc_id" + - "\030\001 \002(\004\"\371\001\n\032GetProcedureResultResponse\0229\n" + - "\005state\030\001 \002(\0162*.hbase.pb.GetProcedureResu" + - "ltResponse.State\022\022\n\nstart_time\030\002 \001(\004\022\023\n\013" + - "last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(\014\0224\n\texc" + - "eption\030\005 \001(\0132!.hbase.pb.ForeignException" + - "Message\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUNNI" + - "NG\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProcedureRequ", - "est\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInterruptIfRu" + - "nning\030\002 \001(\010:\004true\"6\n\026AbortProcedureRespo" + - "nse\022\034\n\024is_procedure_aborted\030\001 \002(\010\"\027\n\025Lis" + - "tProceduresRequest\"@\n\026ListProceduresResp" + - "onse\022&\n\tprocedure\030\001 
\003(\0132\023.hbase.pb.Proce" + - "dure\"\315\001\n\017SetQuotaRequest\022\021\n\tuser_name\030\001 " + - "\001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnamespace\030\003 \001" + - "(\t\022\'\n\ntable_name\030\004 \001(\0132\023.hbase.pb.TableN" + - "ame\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_global" + - "s\030\006 \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hbase.pb.Thr", - "ottleRequest\"\022\n\020SetQuotaResponse\"J\n\037Majo" + - "rCompactionTimestampRequest\022\'\n\ntable_nam" + - "e\030\001 \002(\0132\023.hbase.pb.TableName\"U\n(MajorCom" + - "pactionTimestampForRegionRequest\022)\n\006regi" + - "on\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"@\n M" + - "ajorCompactionTimestampResponse\022\034\n\024compa" + - "ction_timestamp\030\001 \002(\003\"\035\n\033SecurityCapabil" + - "itiesRequest\"\354\001\n\034SecurityCapabilitiesRes" + - "ponse\022G\n\014capabilities\030\001 \003(\01621.hbase.pb.S" + - "ecurityCapabilitiesResponse.Capability\"\202", - "\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATION\020\000" + - "\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHORIZA" + - "TION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CELL_V" + - "ISIBILITY\020\004*(\n\020MasterSwitchType\022\t\n\005SPLIT" + - "\020\000\022\t\n\005MERGE\020\0012\232+\n\rMasterService\022e\n\024GetSc" + - "hemaAlterStatus\022%.hbase.pb.GetSchemaAlte" + - "rStatusRequest\032&.hbase.pb.GetSchemaAlter" + - "StatusResponse\022b\n\023GetTableDescriptors\022$." 
+ - "hbase.pb.GetTableDescriptorsRequest\032%.hb" + - "ase.pb.GetTableDescriptorsResponse\022P\n\rGe", - "tTableNames\022\036.hbase.pb.GetTableNamesRequ" + - "est\032\037.hbase.pb.GetTableNamesResponse\022Y\n\020" + - "GetClusterStatus\022!.hbase.pb.GetClusterSt" + - "atusRequest\032\".hbase.pb.GetClusterStatusR" + - "esponse\022V\n\017IsMasterRunning\022 .hbase.pb.Is" + - "MasterRunningRequest\032!.hbase.pb.IsMaster" + - "RunningResponse\022D\n\tAddColumn\022\032.hbase.pb." + - "AddColumnRequest\032\033.hbase.pb.AddColumnRes" + - "ponse\022M\n\014DeleteColumn\022\035.hbase.pb.DeleteC" + - "olumnRequest\032\036.hbase.pb.DeleteColumnResp", - "onse\022M\n\014ModifyColumn\022\035.hbase.pb.ModifyCo" + - "lumnRequest\032\036.hbase.pb.ModifyColumnRespo" + - "nse\022G\n\nMoveRegion\022\033.hbase.pb.MoveRegionR" + - "equest\032\034.hbase.pb.MoveRegionResponse\022k\n\026" + - "DispatchMergingRegions\022\'.hbase.pb.Dispat" + - "chMergingRegionsRequest\032(.hbase.pb.Dispa" + - "tchMergingRegionsResponse\022M\n\014AssignRegio" + - "n\022\035.hbase.pb.AssignRegionRequest\032\036.hbase" + - ".pb.AssignRegionResponse\022S\n\016UnassignRegi" + - "on\022\037.hbase.pb.UnassignRegionRequest\032 .hb", - "ase.pb.UnassignRegionResponse\022P\n\rOffline" + - "Region\022\036.hbase.pb.OfflineRegionRequest\032\037" + - ".hbase.pb.OfflineRegionResponse\022J\n\013Delet" + - "eTable\022\034.hbase.pb.DeleteTableRequest\032\035.h" + - "base.pb.DeleteTableResponse\022P\n\rtruncateT" + - "able\022\036.hbase.pb.TruncateTableRequest\032\037.h" + - "base.pb.TruncateTableResponse\022J\n\013EnableT" + - "able\022\034.hbase.pb.EnableTableRequest\032\035.hba" + - "se.pb.EnableTableResponse\022M\n\014DisableTabl" + - "e\022\035.hbase.pb.DisableTableRequest\032\036.hbase", - ".pb.DisableTableResponse\022J\n\013ModifyTable\022" + - "\034.hbase.pb.ModifyTableRequest\032\035.hbase.pb" + - ".ModifyTableResponse\022J\n\013CreateTable\022\034.hb" + - "ase.pb.CreateTableRequest\032\035.hbase.pb.Cre" + - 
"ateTableResponse\022A\n\010Shutdown\022\031.hbase.pb." + - "ShutdownRequest\032\032.hbase.pb.ShutdownRespo" + - "nse\022G\n\nStopMaster\022\033.hbase.pb.StopMasterR" + - "equest\032\034.hbase.pb.StopMasterResponse\022h\n\031" + - "IsMasterInMaintenanceMode\022$.hbase.pb.IsI" + - "nMaintenanceModeRequest\032%.hbase.pb.IsInM", - "aintenanceModeResponse\022>\n\007Balance\022\030.hbas" + - "e.pb.BalanceRequest\032\031.hbase.pb.BalanceRe" + - "sponse\022_\n\022SetBalancerRunning\022#.hbase.pb." + - "SetBalancerRunningRequest\032$.hbase.pb.Set" + - "BalancerRunningResponse\022\\\n\021IsBalancerEna" + - "bled\022\".hbase.pb.IsBalancerEnabledRequest" + - "\032#.hbase.pb.IsBalancerEnabledResponse\022k\n" + - "\026SetSplitOrMergeEnabled\022\'.hbase.pb.SetSp" + - "litOrMergeEnabledRequest\032(.hbase.pb.SetS" + - "plitOrMergeEnabledResponse\022h\n\025IsSplitOrM", - "ergeEnabled\022&.hbase.pb.IsSplitOrMergeEna" + - "bledRequest\032\'.hbase.pb.IsSplitOrMergeEna" + - "bledResponse\022D\n\tNormalize\022\032.hbase.pb.Nor" + - "malizeRequest\032\033.hbase.pb.NormalizeRespon" + - "se\022e\n\024SetNormalizerRunning\022%.hbase.pb.Se" + - "tNormalizerRunningRequest\032&.hbase.pb.Set" + - "NormalizerRunningResponse\022b\n\023IsNormalize" + - "rEnabled\022$.hbase.pb.IsNormalizerEnabledR" + - "equest\032%.hbase.pb.IsNormalizerEnabledRes" + - "ponse\022S\n\016RunCatalogScan\022\037.hbase.pb.RunCa", - "talogScanRequest\032 .hbase.pb.RunCatalogSc" + - "anResponse\022e\n\024EnableCatalogJanitor\022%.hba" + - "se.pb.EnableCatalogJanitorRequest\032&.hbas" + - "e.pb.EnableCatalogJanitorResponse\022n\n\027IsC" + - "atalogJanitorEnabled\022(.hbase.pb.IsCatalo" + - "gJanitorEnabledRequest\032).hbase.pb.IsCata" + - "logJanitorEnabledResponse\022V\n\017RunCleanerC" + - "hore\022 .hbase.pb.RunCleanerChoreRequest\032!" 
+ - ".hbase.pb.RunCleanerChoreResponse\022k\n\026Set" + - "CleanerChoreRunning\022\'.hbase.pb.SetCleane", - "rChoreRunningRequest\032(.hbase.pb.SetClean" + - "erChoreRunningResponse\022h\n\025IsCleanerChore" + - "Enabled\022&.hbase.pb.IsCleanerChoreEnabled" + - "Request\032\'.hbase.pb.IsCleanerChoreEnabled" + - "Response\022^\n\021ExecMasterService\022#.hbase.pb" + - ".CoprocessorServiceRequest\032$.hbase.pb.Co" + - "processorServiceResponse\022A\n\010Snapshot\022\031.h" + - "base.pb.SnapshotRequest\032\032.hbase.pb.Snaps" + - "hotResponse\022h\n\025GetCompletedSnapshots\022&.h" + - "base.pb.GetCompletedSnapshotsRequest\032\'.h", - "base.pb.GetCompletedSnapshotsResponse\022S\n" + - "\016DeleteSnapshot\022\037.hbase.pb.DeleteSnapsho" + - "tRequest\032 .hbase.pb.DeleteSnapshotRespon" + - "se\022S\n\016IsSnapshotDone\022\037.hbase.pb.IsSnapsh" + - "otDoneRequest\032 .hbase.pb.IsSnapshotDoneR" + - "esponse\022V\n\017RestoreSnapshot\022 .hbase.pb.Re" + - "storeSnapshotRequest\032!.hbase.pb.RestoreS" + - "napshotResponse\022h\n\025IsRestoreSnapshotDone" + - "\022&.hbase.pb.IsRestoreSnapshotDoneRequest" + - "\032\'.hbase.pb.IsRestoreSnapshotDoneRespons", - "e\022P\n\rExecProcedure\022\036.hbase.pb.ExecProced" + - "ureRequest\032\037.hbase.pb.ExecProcedureRespo" + - "nse\022W\n\024ExecProcedureWithRet\022\036.hbase.pb.E" + - "xecProcedureRequest\032\037.hbase.pb.ExecProce" + - "dureResponse\022V\n\017IsProcedureDone\022 .hbase." + - "pb.IsProcedureDoneRequest\032!.hbase.pb.IsP" + - "rocedureDoneResponse\022V\n\017ModifyNamespace\022" + - " .hbase.pb.ModifyNamespaceRequest\032!.hbas" + - "e.pb.ModifyNamespaceResponse\022V\n\017CreateNa" + - "mespace\022 .hbase.pb.CreateNamespaceReques", - "t\032!.hbase.pb.CreateNamespaceResponse\022V\n\017" + - "DeleteNamespace\022 .hbase.pb.DeleteNamespa" + - "ceRequest\032!.hbase.pb.DeleteNamespaceResp" + - "onse\022k\n\026GetNamespaceDescriptor\022\'.hbase.p" + - "b.GetNamespaceDescriptorRequest\032(.hbase." 
+ - "pb.GetNamespaceDescriptorResponse\022q\n\030Lis" + - "tNamespaceDescriptors\022).hbase.pb.ListNam" + - "espaceDescriptorsRequest\032*.hbase.pb.List" + - "NamespaceDescriptorsResponse\022\206\001\n\037ListTab" + - "leDescriptorsByNamespace\0220.hbase.pb.List", - "TableDescriptorsByNamespaceRequest\0321.hba" + - "se.pb.ListTableDescriptorsByNamespaceRes" + - "ponse\022t\n\031ListTableNamesByNamespace\022*.hba" + - "se.pb.ListTableNamesByNamespaceRequest\032+" + - ".hbase.pb.ListTableNamesByNamespaceRespo" + - "nse\022A\n\010SetQuota\022\031.hbase.pb.SetQuotaReque" + - "st\032\032.hbase.pb.SetQuotaResponse\022x\n\037getLas" + - "tMajorCompactionTimestamp\022).hbase.pb.Maj" + - "orCompactionTimestampRequest\032*.hbase.pb." + - "MajorCompactionTimestampResponse\022\212\001\n(get", - "LastMajorCompactionTimestampForRegion\0222." + - "hbase.pb.MajorCompactionTimestampForRegi" + - "onRequest\032*.hbase.pb.MajorCompactionTime" + - "stampResponse\022_\n\022getProcedureResult\022#.hb" + - "ase.pb.GetProcedureResultRequest\032$.hbase" + - ".pb.GetProcedureResultResponse\022h\n\027getSec" + - "urityCapabilities\022%.hbase.pb.SecurityCap" + - "abilitiesRequest\032&.hbase.pb.SecurityCapa" + - "bilitiesResponse\022S\n\016AbortProcedure\022\037.hba" + - "se.pb.AbortProcedureRequest\032 .hbase.pb.A", - "bortProcedureResponse\022S\n\016ListProcedures\022" + - "\037.hbase.pb.ListProceduresRequest\032 .hbase" + - ".pb.ListProceduresResponseBB\n*org.apache" + - ".hadoop.hbase.protobuf.generatedB\014Master" + - "ProtosH\001\210\001\001\240\001\001" + "oto\032\016Snapshot.proto\"\234\001\n\020AddColumnRequest" + + "\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableNam" + + "e\0225\n\017column_families\030\002 \002(\0132\034.hbase.pb.Co" + + "lumnFamilySchema\022\026\n\013nonce_group\030\003 \001(\004:\0010" + + "\022\020\n\005nonce\030\004 \001(\004:\0010\"\023\n\021AddColumnResponse\"" + + "}\n\023DeleteColumnRequest\022\'\n\ntable_name\030\001 \002" + + 
"(\0132\023.hbase.pb.TableName\022\023\n\013column_name\030\002", + " \002(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 " + + "\001(\004:\0010\"\026\n\024DeleteColumnResponse\"\237\001\n\023Modif" + + "yColumnRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hba" + + "se.pb.TableName\0225\n\017column_families\030\002 \002(\013" + + "2\034.hbase.pb.ColumnFamilySchema\022\026\n\013nonce_" + + "group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\026\n\024Modi" + + "fyColumnResponse\"n\n\021MoveRegionRequest\022)\n" + + "\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifier" + + "\022.\n\020dest_server_name\030\002 \001(\0132\024.hbase.pb.Se" + + "rverName\"\024\n\022MoveRegionResponse\"\222\001\n\035Dispa", + "tchMergingRegionsRequest\022+\n\010region_a\030\001 \002" + + "(\0132\031.hbase.pb.RegionSpecifier\022+\n\010region_" + + "b\030\002 \002(\0132\031.hbase.pb.RegionSpecifier\022\027\n\010fo" + + "rcible\030\003 \001(\010:\005false\" \n\036DispatchMergingRe" + + "gionsResponse\"@\n\023AssignRegionRequest\022)\n\006" + + "region\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"" + + "\026\n\024AssignRegionResponse\"X\n\025UnassignRegio" + + "nRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Regi" + + "onSpecifier\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026Una" + + "ssignRegionResponse\"A\n\024OfflineRegionRequ", + "est\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpe" + + "cifier\"\027\n\025OfflineRegionResponse\"\177\n\022Creat" + + "eTableRequest\022+\n\014table_schema\030\001 \002(\0132\025.hb" + + "ase.pb.TableSchema\022\022\n\nsplit_keys\030\002 \003(\014\022\026" + + "\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010" + + "\"&\n\023CreateTableResponse\022\017\n\007proc_id\030\001 \001(\004" + + "\"g\n\022DeleteTableRequest\022\'\n\ntable_name\030\001 \002" + + "(\0132\023.hbase.pb.TableName\022\026\n\013nonce_group\030\002" + + 
" \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023DeleteTable" + + "Response\022\017\n\007proc_id\030\001 \001(\004\"\207\001\n\024TruncateTa", + "bleRequest\022&\n\ttableName\030\001 \002(\0132\023.hbase.pb" + + ".TableName\022\035\n\016preserveSplits\030\002 \001(\010:\005fals" + + "e\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004" + + ":\0010\"\027\n\025TruncateTableResponse\"g\n\022EnableTa" + + "bleRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.p" + + "b.TableName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005n" + + "once\030\003 \001(\004:\0010\"&\n\023EnableTableResponse\022\017\n\007" + + "proc_id\030\001 \001(\004\"h\n\023DisableTableRequest\022\'\n\n" + + "table_name\030\001 \002(\0132\023.hbase.pb.TableName\022\026\n" + + "\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"", + "\'\n\024DisableTableResponse\022\017\n\007proc_id\030\001 \001(\004" + + "\"\224\001\n\022ModifyTableRequest\022\'\n\ntable_name\030\001 " + + "\002(\0132\023.hbase.pb.TableName\022+\n\014table_schema" + + "\030\002 \002(\0132\025.hbase.pb.TableSchema\022\026\n\013nonce_g" + + "roup\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\025\n\023Modif" + + "yTableResponse\"~\n\026CreateNamespaceRequest" + + "\022:\n\023namespaceDescriptor\030\001 \002(\0132\035.hbase.pb" + + ".NamespaceDescriptor\022\026\n\013nonce_group\030\002 \001(" + + "\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031\n\027CreateNamespac" + + "eResponse\"Y\n\026DeleteNamespaceRequest\022\025\n\rn", + "amespaceName\030\001 \002(\t\022\026\n\013nonce_group\030\002 \001(\004:" + + "\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031\n\027DeleteNamespaceR" + + "esponse\"~\n\026ModifyNamespaceRequest\022:\n\023nam" + + "espaceDescriptor\030\001 \002(\0132\035.hbase.pb.Namesp" + + "aceDescriptor\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n" + + 
"\005nonce\030\003 \001(\004:\0010\"\031\n\027ModifyNamespaceRespon" + + "se\"6\n\035GetNamespaceDescriptorRequest\022\025\n\rn" + + "amespaceName\030\001 \002(\t\"\\\n\036GetNamespaceDescri" + + "ptorResponse\022:\n\023namespaceDescriptor\030\001 \002(" + + "\0132\035.hbase.pb.NamespaceDescriptor\"!\n\037List", + "NamespaceDescriptorsRequest\"^\n ListNames" + + "paceDescriptorsResponse\022:\n\023namespaceDesc" + + "riptor\030\001 \003(\0132\035.hbase.pb.NamespaceDescrip" + + "tor\"?\n&ListTableDescriptorsByNamespaceRe" + + "quest\022\025\n\rnamespaceName\030\001 \002(\t\"U\n\'ListTabl" + + "eDescriptorsByNamespaceResponse\022*\n\013table" + + "Schema\030\001 \003(\0132\025.hbase.pb.TableSchema\"9\n L" + + "istTableNamesByNamespaceRequest\022\025\n\rnames" + + "paceName\030\001 \002(\t\"K\n!ListTableNamesByNamesp" + + "aceResponse\022&\n\ttableName\030\001 \003(\0132\023.hbase.p", + "b.TableName\"\021\n\017ShutdownRequest\"\022\n\020Shutdo" + + "wnResponse\"\023\n\021StopMasterRequest\"\024\n\022StopM" + + "asterResponse\"\034\n\032IsInMaintenanceModeRequ" + + "est\"8\n\033IsInMaintenanceModeResponse\022\031\n\021in" + + "MaintenanceMode\030\001 \002(\010\"\037\n\016BalanceRequest\022" + + "\r\n\005force\030\001 \001(\010\"\'\n\017BalanceResponse\022\024\n\014bal" + + "ancer_ran\030\001 \002(\010\"<\n\031SetBalancerRunningReq" + + "uest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n" + + "\032SetBalancerRunningResponse\022\032\n\022prev_bala" + + "nce_value\030\001 \001(\010\"\032\n\030IsBalancerEnabledRequ", + "est\",\n\031IsBalancerEnabledResponse\022\017\n\007enab" + + "led\030\001 \002(\010\"w\n\035SetSplitOrMergeEnabledReque" + + "st\022\017\n\007enabled\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010" + + "\0220\n\014switch_types\030\003 \003(\0162\032.hbase.pb.Master" + + "SwitchType\"4\n\036SetSplitOrMergeEnabledResp" + + "onse\022\022\n\nprev_value\030\001 \003(\010\"O\n\034IsSplitOrMer" + + 
"geEnabledRequest\022/\n\013switch_type\030\001 \002(\0162\032." + + "hbase.pb.MasterSwitchType\"0\n\035IsSplitOrMe" + + "rgeEnabledResponse\022\017\n\007enabled\030\001 \002(\010\"\022\n\020N" + + "ormalizeRequest\"+\n\021NormalizeResponse\022\026\n\016", + "normalizer_ran\030\001 \002(\010\")\n\033SetNormalizerRun" + + "ningRequest\022\n\n\002on\030\001 \002(\010\"=\n\034SetNormalizer" + + "RunningResponse\022\035\n\025prev_normalizer_value" + + "\030\001 \001(\010\"\034\n\032IsNormalizerEnabledRequest\".\n\033" + + "IsNormalizerEnabledResponse\022\017\n\007enabled\030\001" + + " \002(\010\"\027\n\025RunCatalogScanRequest\"-\n\026RunCata" + + "logScanResponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033" + + "EnableCatalogJanitorRequest\022\016\n\006enable\030\001 " + + "\002(\010\"2\n\034EnableCatalogJanitorResponse\022\022\n\np" + + "rev_value\030\001 \001(\010\" \n\036IsCatalogJanitorEnabl", + "edRequest\"0\n\037IsCatalogJanitorEnabledResp" + + "onse\022\r\n\005value\030\001 \002(\010\"\030\n\026RunCleanerChoreRe" + + "quest\"4\n\027RunCleanerChoreResponse\022\031\n\021clea" + + "ner_chore_ran\030\001 \002(\010\"+\n\035SetCleanerChoreRu" + + "nningRequest\022\n\n\002on\030\001 \002(\010\"4\n\036SetCleanerCh" + + "oreRunningResponse\022\022\n\nprev_value\030\001 \001(\010\"\036" + + "\n\034IsCleanerChoreEnabledRequest\".\n\035IsClea" + + "nerChoreEnabledResponse\022\r\n\005value\030\001 \002(\010\"B" + + "\n\017SnapshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hb" + + "ase.pb.SnapshotDescription\",\n\020SnapshotRe", + "sponse\022\030\n\020expected_timeout\030\001 \002(\003\"\036\n\034GetC" + + "ompletedSnapshotsRequest\"Q\n\035GetCompleted" + + "SnapshotsResponse\0220\n\tsnapshots\030\001 \003(\0132\035.h" + + "base.pb.SnapshotDescription\"H\n\025DeleteSna" + + "pshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.p" + + "b.SnapshotDescription\"\030\n\026DeleteSnapshotR" + + "esponse\"d\n\026RestoreSnapshotRequest\022/\n\010sna" + + 
"pshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescripti" + + "on\022\031\n\nrestoreACL\030\002 \001(\010:\005false\"\031\n\027Restore" + + "SnapshotResponse\"H\n\025IsSnapshotDoneReques", + "t\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.SnapshotD" + + "escription\"^\n\026IsSnapshotDoneResponse\022\023\n\004" + + "done\030\001 \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hb" + + "ase.pb.SnapshotDescription\"O\n\034IsRestoreS" + + "napshotDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.h" + + "base.pb.SnapshotDescription\"4\n\035IsRestore" + + "SnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fals" + + "e\"F\n\033GetSchemaAlterStatusRequest\022\'\n\ntabl" + + "e_name\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034Get" + + "SchemaAlterStatusResponse\022\035\n\025yet_to_upda", + "te_regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"" + + "\213\001\n\032GetTableDescriptorsRequest\022(\n\013table_" + + "names\030\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005rege" + + "x\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(\010:\005fal" + + "se\022\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTableDescrip" + + "torsResponse\022+\n\014table_schema\030\001 \003(\0132\025.hba" + + "se.pb.TableSchema\"[\n\024GetTableNamesReques" + + "t\022\r\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002" + + " \001(\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTab" + + "leNamesResponse\022(\n\013table_names\030\001 \003(\0132\023.h", + "base.pb.TableName\"\031\n\027GetClusterStatusReq" + + "uest\"K\n\030GetClusterStatusResponse\022/\n\016clus" + + "ter_status\030\001 \002(\0132\027.hbase.pb.ClusterStatu" + + "s\"\030\n\026IsMasterRunningRequest\"4\n\027IsMasterR" + + "unningResponse\022\031\n\021is_master_running\030\001 \002(" + + "\010\"I\n\024ExecProcedureRequest\0221\n\tprocedure\030\001" + + " \002(\0132\036.hbase.pb.ProcedureDescription\"F\n\025" + + 
"ExecProcedureResponse\022\030\n\020expected_timeou" + + "t\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsProced" + + "ureDoneRequest\0221\n\tprocedure\030\001 \001(\0132\036.hbas", + "e.pb.ProcedureDescription\"`\n\027IsProcedure" + + "DoneResponse\022\023\n\004done\030\001 \001(\010:\005false\0220\n\010sna" + + "pshot\030\002 \001(\0132\036.hbase.pb.ProcedureDescript" + + "ion\",\n\031GetProcedureResultRequest\022\017\n\007proc" + + "_id\030\001 \002(\004\"\371\001\n\032GetProcedureResultResponse" + + "\0229\n\005state\030\001 \002(\0162*.hbase.pb.GetProcedureR" + + "esultResponse.State\022\022\n\nstart_time\030\002 \001(\004\022" + + "\023\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(\014\0224\n\t" + + "exception\030\005 \001(\0132!.hbase.pb.ForeignExcept" + + "ionMessage\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RU", + "NNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProcedureR" + + "equest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInterruptI" + + "fRunning\030\002 \001(\010:\004true\"6\n\026AbortProcedureRe" + + "sponse\022\034\n\024is_procedure_aborted\030\001 \002(\010\"\027\n\025" + + "ListProceduresRequest\"@\n\026ListProceduresR" + + "esponse\022&\n\tprocedure\030\001 \003(\0132\023.hbase.pb.Pr" + + "ocedure\"\315\001\n\017SetQuotaRequest\022\021\n\tuser_name" + + "\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnamespace\030" + + "\003 \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023.hbase.pb.Tab" + + "leName\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_glo", + "bals\030\006 \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hbase.pb." 
+ + "ThrottleRequest\"\022\n\020SetQuotaResponse\"J\n\037M" + + "ajorCompactionTimestampRequest\022\'\n\ntable_" + + "name\030\001 \002(\0132\023.hbase.pb.TableName\"U\n(Major" + + "CompactionTimestampForRegionRequest\022)\n\006r" + + "egion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"@" + + "\n MajorCompactionTimestampResponse\022\034\n\024co" + + "mpaction_timestamp\030\001 \002(\003\"\035\n\033SecurityCapa" + + "bilitiesRequest\"\354\001\n\034SecurityCapabilities" + + "Response\022G\n\014capabilities\030\001 \003(\01621.hbase.p", + "b.SecurityCapabilitiesResponse.Capabilit" + + "y\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATIO" + + "N\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHOR" + + "IZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CEL" + + "L_VISIBILITY\020\004*(\n\020MasterSwitchType\022\t\n\005SP" + + "LIT\020\000\022\t\n\005MERGE\020\0012\232+\n\rMasterService\022e\n\024Ge" + + "tSchemaAlterStatus\022%.hbase.pb.GetSchemaA" + + "lterStatusRequest\032&.hbase.pb.GetSchemaAl" + + "terStatusResponse\022b\n\023GetTableDescriptors" + + "\022$.hbase.pb.GetTableDescriptorsRequest\032%", + ".hbase.pb.GetTableDescriptorsResponse\022P\n" + + "\rGetTableNames\022\036.hbase.pb.GetTableNamesR" + + "equest\032\037.hbase.pb.GetTableNamesResponse\022" + + "Y\n\020GetClusterStatus\022!.hbase.pb.GetCluste" + + "rStatusRequest\032\".hbase.pb.GetClusterStat" + + "usResponse\022V\n\017IsMasterRunning\022 .hbase.pb" + + ".IsMasterRunningRequest\032!.hbase.pb.IsMas" + + "terRunningResponse\022D\n\tAddColumn\022\032.hbase." 
+ + "pb.AddColumnRequest\032\033.hbase.pb.AddColumn" + + "Response\022M\n\014DeleteColumn\022\035.hbase.pb.Dele", + "teColumnRequest\032\036.hbase.pb.DeleteColumnR" + + "esponse\022M\n\014ModifyColumn\022\035.hbase.pb.Modif" + + "yColumnRequest\032\036.hbase.pb.ModifyColumnRe" + + "sponse\022G\n\nMoveRegion\022\033.hbase.pb.MoveRegi" + + "onRequest\032\034.hbase.pb.MoveRegionResponse\022" + + "k\n\026DispatchMergingRegions\022\'.hbase.pb.Dis" + + "patchMergingRegionsRequest\032(.hbase.pb.Di" + + "spatchMergingRegionsResponse\022M\n\014AssignRe" + + "gion\022\035.hbase.pb.AssignRegionRequest\032\036.hb" + + "ase.pb.AssignRegionResponse\022S\n\016UnassignR", + "egion\022\037.hbase.pb.UnassignRegionRequest\032 " + + ".hbase.pb.UnassignRegionResponse\022P\n\rOffl" + + "ineRegion\022\036.hbase.pb.OfflineRegionReques" + + "t\032\037.hbase.pb.OfflineRegionResponse\022J\n\013De" + + "leteTable\022\034.hbase.pb.DeleteTableRequest\032" + + "\035.hbase.pb.DeleteTableResponse\022P\n\rtrunca" + + "teTable\022\036.hbase.pb.TruncateTableRequest\032" + + "\037.hbase.pb.TruncateTableResponse\022J\n\013Enab" + + "leTable\022\034.hbase.pb.EnableTableRequest\032\035." + + "hbase.pb.EnableTableResponse\022M\n\014DisableT", + "able\022\035.hbase.pb.DisableTableRequest\032\036.hb" + + "ase.pb.DisableTableResponse\022J\n\013ModifyTab" + + "le\022\034.hbase.pb.ModifyTableRequest\032\035.hbase" + + ".pb.ModifyTableResponse\022J\n\013CreateTable\022\034" + + ".hbase.pb.CreateTableRequest\032\035.hbase.pb." + + "CreateTableResponse\022A\n\010Shutdown\022\031.hbase." 
+ + "pb.ShutdownRequest\032\032.hbase.pb.ShutdownRe" + + "sponse\022G\n\nStopMaster\022\033.hbase.pb.StopMast" + + "erRequest\032\034.hbase.pb.StopMasterResponse\022" + + "h\n\031IsMasterInMaintenanceMode\022$.hbase.pb.", + "IsInMaintenanceModeRequest\032%.hbase.pb.Is" + + "InMaintenanceModeResponse\022>\n\007Balance\022\030.h" + + "base.pb.BalanceRequest\032\031.hbase.pb.Balanc" + + "eResponse\022_\n\022SetBalancerRunning\022#.hbase." + + "pb.SetBalancerRunningRequest\032$.hbase.pb." + + "SetBalancerRunningResponse\022\\\n\021IsBalancer" + + "Enabled\022\".hbase.pb.IsBalancerEnabledRequ" + + "est\032#.hbase.pb.IsBalancerEnabledResponse" + + "\022k\n\026SetSplitOrMergeEnabled\022\'.hbase.pb.Se" + + "tSplitOrMergeEnabledRequest\032(.hbase.pb.S", + "etSplitOrMergeEnabledResponse\022h\n\025IsSplit" + + "OrMergeEnabled\022&.hbase.pb.IsSplitOrMerge" + + "EnabledRequest\032\'.hbase.pb.IsSplitOrMerge" + + "EnabledResponse\022D\n\tNormalize\022\032.hbase.pb." + + "NormalizeRequest\032\033.hbase.pb.NormalizeRes" + + "ponse\022e\n\024SetNormalizerRunning\022%.hbase.pb" + + ".SetNormalizerRunningRequest\032&.hbase.pb." + + "SetNormalizerRunningResponse\022b\n\023IsNormal" + + "izerEnabled\022$.hbase.pb.IsNormalizerEnabl" + + "edRequest\032%.hbase.pb.IsNormalizerEnabled", + "Response\022S\n\016RunCatalogScan\022\037.hbase.pb.Ru" + + "nCatalogScanRequest\032 .hbase.pb.RunCatalo" + + "gScanResponse\022e\n\024EnableCatalogJanitor\022%." 
+ + "hbase.pb.EnableCatalogJanitorRequest\032&.h" + + "base.pb.EnableCatalogJanitorResponse\022n\n\027" + + "IsCatalogJanitorEnabled\022(.hbase.pb.IsCat" + + "alogJanitorEnabledRequest\032).hbase.pb.IsC" + + "atalogJanitorEnabledResponse\022V\n\017RunClean" + + "erChore\022 .hbase.pb.RunCleanerChoreReques" + + "t\032!.hbase.pb.RunCleanerChoreResponse\022k\n\026", + "SetCleanerChoreRunning\022\'.hbase.pb.SetCle" + + "anerChoreRunningRequest\032(.hbase.pb.SetCl" + + "eanerChoreRunningResponse\022h\n\025IsCleanerCh" + + "oreEnabled\022&.hbase.pb.IsCleanerChoreEnab" + + "ledRequest\032\'.hbase.pb.IsCleanerChoreEnab" + + "ledResponse\022^\n\021ExecMasterService\022#.hbase" + + ".pb.CoprocessorServiceRequest\032$.hbase.pb" + + ".CoprocessorServiceResponse\022A\n\010Snapshot\022" + + "\031.hbase.pb.SnapshotRequest\032\032.hbase.pb.Sn" + + "apshotResponse\022h\n\025GetCompletedSnapshots\022", + "&.hbase.pb.GetCompletedSnapshotsRequest\032" + + "\'.hbase.pb.GetCompletedSnapshotsResponse" + + "\022S\n\016DeleteSnapshot\022\037.hbase.pb.DeleteSnap" + + "shotRequest\032 .hbase.pb.DeleteSnapshotRes" + + "ponse\022S\n\016IsSnapshotDone\022\037.hbase.pb.IsSna" + + "pshotDoneRequest\032 .hbase.pb.IsSnapshotDo" + + "neResponse\022V\n\017RestoreSnapshot\022 .hbase.pb" + + ".RestoreSnapshotRequest\032!.hbase.pb.Resto" + + "reSnapshotResponse\022h\n\025IsRestoreSnapshotD" + + "one\022&.hbase.pb.IsRestoreSnapshotDoneRequ", + "est\032\'.hbase.pb.IsRestoreSnapshotDoneResp" + + "onse\022P\n\rExecProcedure\022\036.hbase.pb.ExecPro" + + "cedureRequest\032\037.hbase.pb.ExecProcedureRe" + + "sponse\022W\n\024ExecProcedureWithRet\022\036.hbase.p" + + "b.ExecProcedureRequest\032\037.hbase.pb.ExecPr" + + "ocedureResponse\022V\n\017IsProcedureDone\022 .hba" + + "se.pb.IsProcedureDoneRequest\032!.hbase.pb." 
+ + "IsProcedureDoneResponse\022V\n\017ModifyNamespa" + + "ce\022 .hbase.pb.ModifyNamespaceRequest\032!.h" + + "base.pb.ModifyNamespaceResponse\022V\n\017Creat", + "eNamespace\022 .hbase.pb.CreateNamespaceReq" + + "uest\032!.hbase.pb.CreateNamespaceResponse\022" + + "V\n\017DeleteNamespace\022 .hbase.pb.DeleteName" + + "spaceRequest\032!.hbase.pb.DeleteNamespaceR" + + "esponse\022k\n\026GetNamespaceDescriptor\022\'.hbas" + + "e.pb.GetNamespaceDescriptorRequest\032(.hba" + + "se.pb.GetNamespaceDescriptorResponse\022q\n\030" + + "ListNamespaceDescriptors\022).hbase.pb.List" + + "NamespaceDescriptorsRequest\032*.hbase.pb.L" + + "istNamespaceDescriptorsResponse\022\206\001\n\037List", + "TableDescriptorsByNamespace\0220.hbase.pb.L" + + "istTableDescriptorsByNamespaceRequest\0321." + + "hbase.pb.ListTableDescriptorsByNamespace" + + "Response\022t\n\031ListTableNamesByNamespace\022*." + + "hbase.pb.ListTableNamesByNamespaceReques" + + "t\032+.hbase.pb.ListTableNamesByNamespaceRe" + + "sponse\022A\n\010SetQuota\022\031.hbase.pb.SetQuotaRe" + + "quest\032\032.hbase.pb.SetQuotaResponse\022x\n\037get" + + "LastMajorCompactionTimestamp\022).hbase.pb." 
+ + "MajorCompactionTimestampRequest\032*.hbase.", + "pb.MajorCompactionTimestampResponse\022\212\001\n(" + + "getLastMajorCompactionTimestampForRegion" + + "\0222.hbase.pb.MajorCompactionTimestampForR" + + "egionRequest\032*.hbase.pb.MajorCompactionT" + + "imestampResponse\022_\n\022getProcedureResult\022#" + + ".hbase.pb.GetProcedureResultRequest\032$.hb" + + "ase.pb.GetProcedureResultResponse\022h\n\027get" + + "SecurityCapabilities\022%.hbase.pb.Security" + + "CapabilitiesRequest\032&.hbase.pb.SecurityC" + + "apabilitiesResponse\022S\n\016AbortProcedure\022\037.", + "hbase.pb.AbortProcedureRequest\032 .hbase.p" + + "b.AbortProcedureResponse\022S\n\016ListProcedur" + + "es\022\037.hbase.pb.ListProceduresRequest\032 .hb" + + "ase.pb.ListProceduresResponseBB\n*org.apa" + + "che.hadoop.hbase.protobuf.generatedB\014Mas" + + "terProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -66928,7 +67019,7 @@ public final class MasterProtos { internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RestoreSnapshotRequest_descriptor, - new java.lang.String[] { "Snapshot", }); + new java.lang.String[] { "Snapshot", "RestoreACL", }); internal_static_hbase_pb_RestoreSnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(83); internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new @@ -67133,6 +67224,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.getDescriptor(), }, assigner); } diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java index 9805d505082..2e11b4a2be7 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java @@ -8,6 +8,1494 @@ public final class SnapshotProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + public interface SnapshotDescriptionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + /** + * required string name = 1; + */ + boolean hasName(); + /** + * required string name = 1; + */ + java.lang.String getName(); + /** + * required string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + // optional string table = 2; + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + boolean hasTable(); + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + java.lang.String getTable(); + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + com.google.protobuf.ByteString + getTableBytes(); + + // optional int64 creation_time = 3 [default = 0]; + /** + * optional int64 creation_time = 3 [default = 0]; + */ + boolean hasCreationTime(); + /** + * optional int64 creation_time = 3 [default = 0]; + */ + long getCreationTime(); + + // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + boolean hasType(); + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType(); + + // optional int32 version = 5; + /** + * optional int32 version = 5; + */ + boolean hasVersion(); + /** + * optional int32 version = 5; + */ + int getVersion(); + + // optional string owner = 6; + /** + * optional string owner = 6; + */ + boolean hasOwner(); + /** + * optional string owner = 6; + */ + java.lang.String getOwner(); + /** + * optional string owner = 6; + */ + com.google.protobuf.ByteString + getOwnerBytes(); + + // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + boolean hasUsersAndPermissions(); + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions(); + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder(); + } + /** + * Protobuf type {@code hbase.pb.SnapshotDescription} + * + *
+   **
+   * Description of the snapshot to take
+   * 
+ */ + public static final class SnapshotDescription extends + com.google.protobuf.GeneratedMessage + implements SnapshotDescriptionOrBuilder { + // Use SnapshotDescription.newBuilder() to construct. + private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SnapshotDescription defaultInstance; + public static SnapshotDescription getDefaultInstance() { + return defaultInstance; + } + + public SnapshotDescription getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SnapshotDescription( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + table_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + creationTime_ = input.readInt64(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value = 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + type_ = value; + } + break; + } + case 40: { + bitField0_ |= 0x00000010; + version_ = input.readInt32(); + break; + } + case 50: { + bitField0_ |= 0x00000020; + owner_ = input.readBytes(); + break; + } + case 58: { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder subBuilder = null; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + subBuilder = usersAndPermissions_.toBuilder(); + } + usersAndPermissions_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(usersAndPermissions_); + usersAndPermissions_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000040; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class); + } + + 
public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SnapshotDescription parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotDescription(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code hbase.pb.SnapshotDescription.Type} + */ + public enum Type + implements com.google.protobuf.ProtocolMessageEnum { + /** + * DISABLED = 0; + */ + DISABLED(0, 0), + /** + * FLUSH = 1; + */ + FLUSH(1, 1), + /** + * SKIPFLUSH = 2; + */ + SKIPFLUSH(2, 2), + ; + + /** + * DISABLED = 0; + */ + public static final int DISABLED_VALUE = 0; + /** + * FLUSH = 1; + */ + public static final int FLUSH_VALUE = 1; + /** + * SKIPFLUSH = 2; + */ + public static final int SKIPFLUSH_VALUE = 2; + + + public final int getNumber() { return value; } + + public static Type valueOf(int value) { + switch (value) { + case 0: return DISABLED; + case 1: return FLUSH; + case 2: return SKIPFLUSH; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Type findValueByNumber(int number) { + return Type.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0); + } + + private static final Type[] VALUES = values(); + + public static Type valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private Type(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type) + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + /** + * required string name = 1; + */ + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; + } + return s; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string table = 2; + public static final int TABLE_FIELD_NUMBER = 2; + private java.lang.Object table_; + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + public boolean hasTable() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + table_ = s; + } + return s; + } + } + /** + * optional string table = 2; + * + *
+     * not needed for delete, but checked for in taking snapshot
+     * 
+ */ + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int64 creation_time = 3 [default = 0]; + public static final int CREATION_TIME_FIELD_NUMBER = 3; + private long creationTime_; + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public boolean hasCreationTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public long getCreationTime() { + return creationTime_; + } + + // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + public static final int TYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_; + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public boolean hasType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() { + return type_; + } + + // optional int32 version = 5; + public static final int VERSION_FIELD_NUMBER = 5; + private int version_; + /** + * optional int32 version = 5; + */ + public boolean hasVersion() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 version = 5; + */ + public int getVersion() { + return version_; + } + + // optional string owner = 6; + public static final int OWNER_FIELD_NUMBER = 6; + private java.lang.Object owner_; + /** + * optional string owner = 6; + */ + public boolean hasOwner() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string owner = 6; + 
*/ + public java.lang.String getOwner() { + java.lang.Object ref = owner_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + owner_ = s; + } + return s; + } + } + /** + * optional string owner = 6; + */ + public com.google.protobuf.ByteString + getOwnerBytes() { + java.lang.Object ref = owner_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + owner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + public static final int USERS_AND_PERMISSIONS_FIELD_NUMBER = 7; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_; + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public boolean hasUsersAndPermissions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() { + return usersAndPermissions_; + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() { + return usersAndPermissions_; + } + + private void initFields() { + name_ = ""; + table_ = ""; + creationTime_ = 0L; + type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH; + version_ = 0; + owner_ = ""; + usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance(); + } + 
private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (hasUsersAndPermissions()) { + if (!getUsersAndPermissions().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getTableBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeInt64(3, creationTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, type_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeInt32(5, version_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, getOwnerBytes()); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeMessage(7, usersAndPermissions_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getTableBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, creationTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, 
type_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(5, version_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, getOwnerBytes()); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, usersAndPermissions_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasTable() == other.hasTable()); + if (hasTable()) { + result = result && getTable() + .equals(other.getTable()); + } + result = result && (hasCreationTime() == other.hasCreationTime()); + if (hasCreationTime()) { + result = result && (getCreationTime() + == other.getCreationTime()); + } + result = result && (hasType() == other.hasType()); + if (hasType()) { + result = result && + (getType() == other.getType()); + } + result = result && (hasVersion() == other.hasVersion()); + if (hasVersion()) { + result = result && (getVersion() + == other.getVersion()); + } + result = result && (hasOwner() == other.hasOwner()); + if 
(hasOwner()) { + result = result && getOwner() + .equals(other.getOwner()); + } + result = result && (hasUsersAndPermissions() == other.hasUsersAndPermissions()); + if (hasUsersAndPermissions()) { + result = result && getUsersAndPermissions() + .equals(other.getUsersAndPermissions()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasTable()) { + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + } + if (hasCreationTime()) { + hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCreationTime()); + } + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getType()); + } + if (hasVersion()) { + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + } + if (hasOwner()) { + hash = (37 * hash) + OWNER_FIELD_NUMBER; + hash = (53 * hash) + getOwner().hashCode(); + } + if (hasUsersAndPermissions()) { + hash = (37 * hash) + USERS_AND_PERMISSIONS_FIELD_NUMBER; + hash = (53 * hash) + getUsersAndPermissions().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SnapshotDescription} + * + *
+     **
+     * Description of the snapshot to take
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getUsersAndPermissionsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + table_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + creationTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH; + bitField0_ = (bitField0_ & ~0x00000008); + version_ = 0; + bitField0_ = (bitField0_ & ~0x00000010); + owner_ = ""; + bitField0_ = (bitField0_ & ~0x00000020); + if (usersAndPermissionsBuilder_ == null) { + usersAndPermissions_ = 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance(); + } else { + usersAndPermissionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000040); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription build() { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.table_ = table_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.creationTime_ = creationTime_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.type_ = type_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.version_ = version_; + 
if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.owner_ = owner_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000040; + } + if (usersAndPermissionsBuilder_ == null) { + result.usersAndPermissions_ = usersAndPermissions_; + } else { + result.usersAndPermissions_ = usersAndPermissionsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) return this; + if (other.hasName()) { + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); + } + if (other.hasTable()) { + bitField0_ |= 0x00000002; + table_ = other.table_; + onChanged(); + } + if (other.hasCreationTime()) { + setCreationTime(other.getCreationTime()); + } + if (other.hasType()) { + setType(other.getType()); + } + if (other.hasVersion()) { + setVersion(other.getVersion()); + } + if (other.hasOwner()) { + bitField0_ |= 0x00000020; + owner_ = other.owner_; + onChanged(); + } + if (other.hasUsersAndPermissions()) { + mergeUsersAndPermissions(other.getUsersAndPermissions()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (hasUsersAndPermissions()) { + if (!getUsersAndPermissions().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + /** + * required string name = 1; + */ + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + /** + * required string name = 1; + */ + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * required string name = 1; + */ + public Builder setNameBytes( + 
com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + + // optional string table = 2; + private java.lang.Object table_ = ""; + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public boolean hasTable() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + table_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public Builder setTable( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + table_ = value; + onChanged(); + return this; + } + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public Builder clearTable() { + bitField0_ = (bitField0_ & ~0x00000002); + table_ = getDefaultInstance().getTable(); + onChanged(); + return this; + } + /** + * optional string table = 2; + * + *
+       * not needed for delete, but checked for in taking snapshot
+       * 
+ */ + public Builder setTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + table_ = value; + onChanged(); + return this; + } + + // optional int64 creation_time = 3 [default = 0]; + private long creationTime_ ; + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public boolean hasCreationTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public long getCreationTime() { + return creationTime_; + } + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public Builder setCreationTime(long value) { + bitField0_ |= 0x00000004; + creationTime_ = value; + onChanged(); + return this; + } + /** + * optional int64 creation_time = 3 [default = 0]; + */ + public Builder clearCreationTime() { + bitField0_ = (bitField0_ & ~0x00000004); + creationTime_ = 0L; + onChanged(); + return this; + } + + // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH; + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public boolean hasType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() { + return type_; + } + /** + * optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public Builder setType(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + type_ = value; + onChanged(); + return this; + } + /** + * 
optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH]; + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000008); + type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH; + onChanged(); + return this; + } + + // optional int32 version = 5; + private int version_ ; + /** + * optional int32 version = 5; + */ + public boolean hasVersion() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 version = 5; + */ + public int getVersion() { + return version_; + } + /** + * optional int32 version = 5; + */ + public Builder setVersion(int value) { + bitField0_ |= 0x00000010; + version_ = value; + onChanged(); + return this; + } + /** + * optional int32 version = 5; + */ + public Builder clearVersion() { + bitField0_ = (bitField0_ & ~0x00000010); + version_ = 0; + onChanged(); + return this; + } + + // optional string owner = 6; + private java.lang.Object owner_ = ""; + /** + * optional string owner = 6; + */ + public boolean hasOwner() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string owner = 6; + */ + public java.lang.String getOwner() { + java.lang.Object ref = owner_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + owner_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string owner = 6; + */ + public com.google.protobuf.ByteString + getOwnerBytes() { + java.lang.Object ref = owner_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + owner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string owner = 6; + */ + public Builder setOwner( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + owner_ = value; 
+ onChanged(); + return this; + } + /** + * optional string owner = 6; + */ + public Builder clearOwner() { + bitField0_ = (bitField0_ & ~0x00000020); + owner_ = getDefaultInstance().getOwner(); + onChanged(); + return this; + } + /** + * optional string owner = 6; + */ + public Builder setOwnerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + owner_ = value; + onChanged(); + return this; + } + + // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder> usersAndPermissionsBuilder_; + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public boolean hasUsersAndPermissions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() { + if (usersAndPermissionsBuilder_ == null) { + return usersAndPermissions_; + } else { + return usersAndPermissionsBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public Builder setUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) { + if (usersAndPermissionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + 
usersAndPermissions_ = value; + onChanged(); + } else { + usersAndPermissionsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + return this; + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public Builder setUsersAndPermissions( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder builderForValue) { + if (usersAndPermissionsBuilder_ == null) { + usersAndPermissions_ = builderForValue.build(); + onChanged(); + } else { + usersAndPermissionsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + return this; + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public Builder mergeUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) { + if (usersAndPermissionsBuilder_ == null) { + if (((bitField0_ & 0x00000040) == 0x00000040) && + usersAndPermissions_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance()) { + usersAndPermissions_ = + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.newBuilder(usersAndPermissions_).mergeFrom(value).buildPartial(); + } else { + usersAndPermissions_ = value; + } + onChanged(); + } else { + usersAndPermissionsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000040; + return this; + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public Builder clearUsersAndPermissions() { + if (usersAndPermissionsBuilder_ == null) { + usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance(); + onChanged(); + } else { + usersAndPermissionsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000040); + return this; + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder getUsersAndPermissionsBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return getUsersAndPermissionsFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() { + if (usersAndPermissionsBuilder_ != null) { + return usersAndPermissionsBuilder_.getMessageOrBuilder(); + } else { + return usersAndPermissions_; + } + } + /** + * optional .hbase.pb.UsersAndPermissions users_and_permissions = 7; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder> + getUsersAndPermissionsFieldBuilder() { + if (usersAndPermissionsBuilder_ == null) { + usersAndPermissionsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>( + usersAndPermissions_, + getParentForChildren(), + isClean()); + usersAndPermissions_ = null; + } + return usersAndPermissionsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription) + } + + static { + defaultInstance = new SnapshotDescription(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription) + } + public interface SnapshotFileInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -4685,6 +6173,11 @@ public final class SnapshotProtos { // 
@@protoc_insertion_point(class_scope:hbase.pb.SnapshotDataManifest) } + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_SnapshotDescription_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotFileInfo_descriptor; private static @@ -4719,39 +6212,52 @@ public final class SnapshotProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" + - "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" + - " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" + - "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" + - "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" + - "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" + - "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" + - "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" + - "napshotRegionManifest.FamilyFiles\032T\n\tSto" + - "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132", - "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" + - "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" + - "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" + - "onManifest.StoreFile\"\177\n\024SnapshotDataMani" + - "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" + - "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" + - "ase.pb.SnapshotRegionManifestBD\n*org.apa" + - "che.hadoop.hbase.protobuf.generatedB\016Sna" + - "pshotProtosH\001\210\001\001\240\001\001" + "\n\016Snapshot.proto\022\010hbase.pb\032\023AccessContro" + + "l.proto\032\010FS.proto\032\013HBase.proto\"\223\002\n\023Snaps" + + 
"hotDescription\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 " + + "\001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\0227\n\004type\030\004 " + + "\001(\0162\".hbase.pb.SnapshotDescription.Type:" + + "\005FLUSH\022\017\n\007version\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\022<" + + "\n\025users_and_permissions\030\007 \001(\0132\035.hbase.pb" + + ".UsersAndPermissions\".\n\004Type\022\014\n\010DISABLED" + + "\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\222\001\n\020Snapsho" + + "tFileInfo\022-\n\004type\030\001 \002(\0162\037.hbase.pb.Snaps", + "hotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal_s" + + "erver\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022\t\n" + + "\005HFILE\020\001\022\007\n\003WAL\020\002\"\323\002\n\026SnapshotRegionMani" + + "fest\022\017\n\007version\030\001 \001(\005\022)\n\013region_info\030\002 \002" + + "(\0132\024.hbase.pb.RegionInfo\022B\n\014family_files" + + "\030\003 \003(\0132,.hbase.pb.SnapshotRegionManifest" + + ".FamilyFiles\032T\n\tStoreFile\022\014\n\004name\030\001 \002(\t\022" + + "&\n\treference\030\002 \001(\0132\023.hbase.pb.Reference\022" + + "\021\n\tfile_size\030\003 \001(\004\032c\n\013FamilyFiles\022\023\n\013fam" + + "ily_name\030\001 \002(\014\022?\n\013store_files\030\002 \003(\0132*.hb", + "ase.pb.SnapshotRegionManifest.StoreFile\"" + + "\177\n\024SnapshotDataManifest\022+\n\014table_schema\030" + + "\001 \002(\0132\025.hbase.pb.TableSchema\022:\n\020region_m" + + "anifests\030\002 \003(\0132 .hbase.pb.SnapshotRegion" + + "ManifestBD\n*org.apache.hadoop.hbase.prot" + + "obuf.generatedB\016SnapshotProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { 
descriptor = root; - internal_static_hbase_pb_SnapshotFileInfo_descriptor = + internal_static_hbase_pb_SnapshotDescription_descriptor = getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_SnapshotDescription_descriptor, + new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", "UsersAndPermissions", }); + internal_static_hbase_pb_SnapshotFileInfo_descriptor = + getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SnapshotFileInfo_descriptor, new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", }); internal_static_hbase_pb_SnapshotRegionManifest_descriptor = - getDescriptor().getMessageTypes().get(1); + getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SnapshotRegionManifest_descriptor, @@ -4769,7 +6275,7 @@ public final class SnapshotProtos { internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor, new java.lang.String[] { "FamilyName", "StoreFiles", }); internal_static_hbase_pb_SnapshotDataManifest_descriptor = - getDescriptor().getMessageTypes().get(2); + getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SnapshotDataManifest_descriptor, @@ -4780,6 +6286,7 @@ public final class SnapshotProtos { com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor(), 
org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), }, assigner); diff --git a/hbase-protocol/src/main/protobuf/HBase.proto b/hbase-protocol/src/main/protobuf/HBase.proto index 820dbebe935..279eb39d143 100644 --- a/hbase-protocol/src/main/protobuf/HBase.proto +++ b/hbase-protocol/src/main/protobuf/HBase.proto @@ -157,23 +157,6 @@ message NameInt64Pair { optional int64 value = 2; } -/** - * Description of the snapshot to take - */ -message SnapshotDescription { - required string name = 1; - optional string table = 2; // not needed for delete, but checked for in taking snapshot - optional int64 creation_time = 3 [default = 0]; - enum Type { - DISABLED = 0; - FLUSH = 1; - SKIPFLUSH = 2; - } - optional Type type = 4 [default = FLUSH]; - optional int32 version = 5; - optional string owner = 6; -} - /** * Description of the distributed procedure to take */ diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto index 1c60465265b..d19856bc595 100644 --- a/hbase-protocol/src/main/protobuf/Master.proto +++ b/hbase-protocol/src/main/protobuf/Master.proto @@ -32,6 +32,7 @@ import "ClusterStatus.proto"; import "ErrorHandling.proto"; import "Procedure.proto"; import "Quota.proto"; +import "Snapshot.proto"; /* Column-level protobufs */ @@ -391,6 +392,7 @@ message DeleteSnapshotResponse { message RestoreSnapshotRequest { required SnapshotDescription snapshot = 1; + optional bool restoreACL = 2 [default=false]; } message RestoreSnapshotResponse { diff --git a/hbase-protocol/src/main/protobuf/Snapshot.proto b/hbase-protocol/src/main/protobuf/Snapshot.proto index ae1a1e680ac..015787df955 100644 --- a/hbase-protocol/src/main/protobuf/Snapshot.proto +++ b/hbase-protocol/src/main/protobuf/Snapshot.proto @@ -23,9 +23,28 @@ option java_generic_services = true; option java_generate_equals_and_hash = true; option optimize_for = SPEED; +import 
"AccessControl.proto"; import "FS.proto"; import "HBase.proto"; +/** + * Description of the snapshot to take + */ +message SnapshotDescription { + required string name = 1; + optional string table = 2; // not needed for delete, but checked for in taking snapshot + optional int64 creation_time = 3 [default = 0]; + enum Type { + DISABLED = 0; + FLUSH = 1; + SKIPFLUSH = 2; + } + optional Type type = 4 [default = FLUSH]; + optional int32 version = 5; + optional string owner = 6; + optional UsersAndPermissions users_and_permissions = 7; +} + message SnapshotFileInfo { enum Type { HFILE = 1; diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon index 0ecc131b709..58032976c77 100644 --- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon +++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon @@ -49,7 +49,7 @@ org.apache.hadoop.hbase.HTableDescriptor; org.apache.hadoop.hbase.HBaseConfiguration; org.apache.hadoop.hbase.TableName; org.apache.hadoop.hbase.tool.Canary; -org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; org.apache.hadoop.hbase.master.DeadServer; org.apache.hadoop.hbase.protobuf.ProtobufUtil; org.apache.hadoop.hbase.security.visibility.VisibilityConstants; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java index 2d8fdbacc5c..b2f76d2dd5d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java @@ -34,8 +34,8 @@ import 
org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import java.io.IOException; import java.util.List; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java index 5fe80c0deb4..f747599c61c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java @@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import java.io.IOException; import java.util.List; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java index 24c62b2dfea..75581479b9d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.master.RegionPlan; import 
org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Defines coprocessor hooks for interacting with operations on the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java index 75c6fc5a5f6..d06fdf902a9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java @@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java index 086d72a62e2..9fb8d81d234 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java @@ -39,8 +39,8 @@ import 
org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.coprocessor.*; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Provides the coprocessor framework and environment for master oriented diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index c678c86de0e..0155e8a87bc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -60,7 +60,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStor import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; @@ -186,6 +185,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Repor import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest; import 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.User; @@ -1311,7 +1311,8 @@ public class MasterRpcServices extends RSRpcServices master.ensureNamespaceExists(dstTable.getNamespaceAsString()); SnapshotDescription reqSnapshot = request.getSnapshot(); - master.snapshotManager.restoreSnapshot(reqSnapshot); + master.snapshotManager.restoreSnapshot(reqSnapshot, + request.hasRestoreACL() && request.getRestoreACL()); return RestoreSnapshotResponse.newBuilder().build(); } catch (ForeignException e) { throw new ServiceException(e.getCause()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java index 2f769f3c3c2..0f1f495b09d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.master; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.errorhandling.ForeignException; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Watch the current snapshot under process diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java index 2a6dca8951c..ff59ea12d8b 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel; import org.apache.hadoop.hbase.master.handler.CreateTableHandler; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; @@ -63,6 +63,7 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot private final static String NAME = "Master CloneSnapshotHandler"; private final SnapshotDescription snapshot; + private final boolean restoreAcl; private final ForeignExceptionDispatcher monitor; private final MetricsSnapshot metricsSnapshot = new MetricsSnapshot(); @@ -73,12 +74,14 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot private volatile boolean stopped = false; public CloneSnapshotHandler(final MasterServices masterServices, - final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor) { + final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor, + final boolean restoreAcl) { super(masterServices, masterServices.getMasterFileSystem(), hTableDescriptor, masterServices.getConfiguration(), null, masterServices); // Snapshot information this.snapshot = snapshot; + this.restoreAcl = restoreAcl; // Monitor this.monitor = new ForeignExceptionDispatcher(); @@ -118,6 +121,13 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot 
Preconditions.checkArgument(!metaChanges.hasRegionsToRemove(), "A clone should not have regions to remove"); + // Clone acl of snapshot into newly created table. + if (restoreAcl && snapshot.hasUsersAndPermissions() + && snapshot.getUsersAndPermissions() != null + && SnapshotDescriptionUtils.isSecurityAvailable(conf)) { + RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, conf); + } + // At this point the clone is complete. Next step is enabling the table. String msg = "Clone snapshot="+ snapshot.getName() +" on table=" + tableName + " completed!"; LOG.info(msg); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java index f574c1469e4..8e40a7d0f3d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java @@ -25,8 +25,6 @@ import java.util.concurrent.ThreadPoolExecutor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HRegionInfo; @@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.master.MasterServices; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest; import org.apache.hadoop.hbase.util.FSUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java index 7e047acdc67..fa4245a3c93 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.procedure.Procedure; import org.apache.hadoop.hbase.procedure.ProcedureCoordinator; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException; import org.apache.hadoop.hbase.util.Pair; @@ -49,7 +49,7 @@ public class EnabledTableSnapshotHandler extends TakeSnapshotHandler { private final ProcedureCoordinator coordinator; public EnabledTableSnapshotHandler(SnapshotDescription snapshot, MasterServices master, - final SnapshotManager manager) { + final SnapshotManager manager) { super(snapshot, master); this.coordinator = manager.getCoordinator(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java index bb54fc30596..73b21989ae1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import 
org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.master.MasterServices; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java index 56faf766d3c..80f151d74b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel; import org.apache.hadoop.hbase.master.handler.TableEventHandler; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; @@ -65,6 +65,7 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho private final HTableDescriptor hTableDescriptor; private final SnapshotDescription snapshot; + private final boolean restoreAcl; private final ForeignExceptionDispatcher monitor; private final MetricsSnapshot metricsSnapshot = new MetricsSnapshot(); @@ -73,11 +74,13 @@ public 
class RestoreSnapshotHandler extends TableEventHandler implements Snapsho private volatile boolean stopped = false; public RestoreSnapshotHandler(final MasterServices masterServices, - final SnapshotDescription snapshot, final HTableDescriptor htd) throws IOException { + final SnapshotDescription snapshot, final HTableDescriptor htd, final boolean restoreAcl) + throws IOException { super(EventType.C_M_RESTORE_SNAPSHOT, htd.getTableName(), masterServices, masterServices); // Snapshot information this.snapshot = snapshot; + this.restoreAcl = restoreAcl; // Monitor this.monitor = new ForeignExceptionDispatcher(); @@ -166,6 +169,14 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho } metaChanges.updateMetaParentRegions(this.server.getConnection(), hris); + // 5. restore acl of snapshot into the table. + if (restoreAcl && snapshot.hasUsersAndPermissions() + && snapshot.getUsersAndPermissions() != null + && SnapshotDescriptionUtils.isSecurityAvailable(server.getConfiguration())) { + RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, server.getConfiguration()); + } + + // At this point the restore is complete. Next step is enabling the table. 
LOG.info("Restore snapshot=" + ClientSnapshotDescriptionUtils.toString(snapshot) + " on table=" + tableName + " completed!"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java index e60043f2499..9c5057119a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java @@ -62,8 +62,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs; import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinatorRpcs; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.hadoop.hbase.quotas.QuotaExceededException; import org.apache.hadoop.hbase.security.AccessDeniedException; @@ -654,7 +654,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable * @param hTableDescriptor Table Descriptor of the table to create */ synchronized void cloneSnapshot(final SnapshotDescription snapshot, - final HTableDescriptor hTableDescriptor) throws HBaseSnapshotException { + final HTableDescriptor hTableDescriptor, final boolean restoreAcl) + throws HBaseSnapshotException { TableName tableName = hTableDescriptor.getTableName(); // make sure we aren't running a snapshot on the same table @@ -669,7 +670,7 @@ public class SnapshotManager extends 
MasterProcedureManager implements Stoppable try { CloneSnapshotHandler handler = - new CloneSnapshotHandler(master, snapshot, hTableDescriptor).prepare(); + new CloneSnapshotHandler(master, snapshot, hTableDescriptor, restoreAcl).prepare(); this.executorService.submit(handler); this.restoreHandlers.put(tableName, handler); } catch (Exception e) { @@ -685,7 +686,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable * @param reqSnapshot * @throws IOException */ - public void restoreSnapshot(SnapshotDescription reqSnapshot) throws IOException { + public void restoreSnapshot(SnapshotDescription reqSnapshot, boolean restoreAcl) + throws IOException { FileSystem fs = master.getMasterFileSystem().getFileSystem(); Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(reqSnapshot, rootDir); MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost(); @@ -742,7 +744,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable if (tableRegionCount > 0 && tableRegionCount < snapshotRegionCount) { checkAndUpdateNamespaceRegionQuota(snapshotRegionCount, tableName); } - restoreSnapshot(snapshot, snapshotTableDesc); + restoreSnapshot(snapshot, snapshotTableDesc, restoreAcl); // Update the region quota if snapshotRegionCount is smaller. 
This step should not fail // because we have reserved enough region quota before hand if (tableRegionCount > 0 && tableRegionCount > snapshotRegionCount) { @@ -776,7 +778,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable } try { checkAndUpdateNamespaceQuota(manifest, tableName); - cloneSnapshot(snapshot, htd); + cloneSnapshot(snapshot, htd, restoreAcl); } catch (IOException e) { this.master.getMasterQuotaManager().removeTableFromNamespaceQuota(tableName); LOG.error("Exception occurred while cloning the snapshot " + snapshot.getName() @@ -825,7 +827,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable * @param hTableDescriptor Table Descriptor */ private synchronized void restoreSnapshot(final SnapshotDescription snapshot, - final HTableDescriptor hTableDescriptor) throws HBaseSnapshotException { + final HTableDescriptor hTableDescriptor, final boolean restoreAcl) + throws HBaseSnapshotException { TableName tableName = hTableDescriptor.getTableName(); // make sure we aren't running a snapshot on the same table @@ -840,7 +843,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable try { RestoreSnapshotHandler handler = - new RestoreSnapshotHandler(master, snapshot, hTableDescriptor).prepare(); + new RestoreSnapshotHandler(master, snapshot, hTableDescriptor, restoreAcl).prepare(); this.executorService.submit(handler); restoreHandlers.put(tableName, handler); } catch (Exception e) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java index 39387cbebeb..7b45610c8c4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java @@ -47,7 +47,7 @@ import 
org.apache.hadoop.hbase.master.TableLockManager; import org.apache.hadoop.hbase.master.TableLockManager.TableLock; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 1c44f6a50ba..4089a2ecbab 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -155,7 +155,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall; import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad; import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 6a9360672c8..19c3328a76f 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.io.InterruptedIOException; import java.lang.Thread.UncaughtExceptionHandler; -import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.lang.reflect.Constructor; import java.net.BindException; @@ -96,7 +95,6 @@ import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.executor.ExecutorType; import org.apache.hadoop.hbase.fs.HFileSystem; -import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.util.HeapMemorySizeUtil; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java index f0836018245..619248e8c7e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java @@ -28,10 +28,9 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.procedure.ProcedureMember; import org.apache.hadoop.hbase.procedure.Subprocedure; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.Region; -import 
org.apache.hadoop.hbase.regionserver.Region.Operation; import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.SnapshotSubprocedurePool; import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java index 0f9ac195b92..99a0261ec68 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.procedure.RegionServerProcedureManager; import org.apache.hadoop.hbase.procedure.Subprocedure; import org.apache.hadoop.hbase.procedure.SubprocedureFactory; import org.apache.hadoop.hbase.procedure.ZKProcedureMemberRpcs; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionServerServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index ff75958c1d4..719752660fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -470,7 +470,7 @@ public class AccessControlLists { return allPerms; } - static ListMultimap<String, TablePermission> getTablePermissions(Configuration conf, + public static ListMultimap<String, TablePermission> getTablePermissions(Configuration conf, TableName tableName)
throws IOException { return getPermissions(conf, tableName != null ? tableName.getName() : null, null); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index dd15c3cc1fd..722d9ebc0d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -91,10 +91,10 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.regionserver.Region; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java index 10d1df89832..e9ca7dc7de5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; 
import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import java.util.Arrays; import java.util.Locale; @@ -47,7 +47,7 @@ public class CreateSnapshot extends AbstractHBaseTool { this.addRequiredOptWithArg("n", "name", "The name of the created snapshot"); this.addOptWithArg("s", "snapshot_type", "Snapshot Type. FLUSH is default. Posible values are " - + Arrays.toString(HBaseProtos.SnapshotDescription.Type.values())); + + Arrays.toString(SnapshotDescription.Type.values())); } @Override @@ -65,9 +65,9 @@ public class CreateSnapshot extends AbstractHBaseTool { try { connection = ConnectionFactory.createConnection(getConf()); admin = connection.getAdmin(); - HBaseProtos.SnapshotDescription.Type type = HBaseProtos.SnapshotDescription.Type.FLUSH; + SnapshotDescription.Type type = SnapshotDescription.Type.FLUSH; if (snapshotType != null) { - type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase(Locale.ROOT)); + type = SnapshotDescription.Type.valueOf(snapshotName.toUpperCase(Locale.ROOT)); } admin.snapshot(snapshotName, TableName.valueOf(tableName), type); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java index 97d06030d66..6ec0b9020a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java @@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.io.FileLink; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.WALLink; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import 
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.util.FSUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index 0c2e2abdcb2..39887c6bd7e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -29,10 +29,12 @@ import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ThreadPoolExecutor; +import com.google.common.collect.ListMultimap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -51,11 +53,14 @@ import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; +import org.apache.hadoop.hbase.security.access.AccessControlClient; +import org.apache.hadoop.hbase.security.access.TablePermission; 
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ModifyRegionUtils; @@ -602,7 +607,7 @@ public class RestoreSnapshotHelper { * * @param familyDir destination directory for the store file * @param regionInfo destination region info for the table - * @param hfileName reference file name + * @param storeFile reference file name */ private void restoreReferenceFile(final Path familyDir, final HRegionInfo regionInfo, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { @@ -741,4 +746,25 @@ public class RestoreSnapshotHelper { } return metaChanges; } + + public static void restoreSnapshotACL(SnapshotDescription snapshot, TableName newTableName, + Configuration conf) throws IOException { + if (snapshot.hasUsersAndPermissions() && snapshot.getUsersAndPermissions() != null) { + LOG.info("Restore snapshot acl to table. snapshot: " + snapshot + ", table: " + newTableName); + ListMultimap<String, TablePermission> perms = + ProtobufUtil.toUserTablePermissions(snapshot.getUsersAndPermissions()); + try { + for (Entry<String, TablePermission> e : perms.entries()) { + String user = e.getKey(); + TablePermission perm = e.getValue(); + perm.setTableName(newTableName); + AccessControlClient.grant(conf, perm.getTableName(), user, perm.getFamily(), + perm.getQualifier(), perm.getActions()); + } + } catch (Throwable e) { + throw new IOException("Grant acl into newly created table failed. 
snapshot: " + snapshot + + ", table: " + newTableName, e); + } + } + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java index aebb6478ab6..b735d6491ed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.snapshot; -import java.io.FileNotFoundException; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Collections; +import com.google.common.collect.ListMultimap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -30,10 +31,16 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.snapshot.SnapshotManifestV2; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.TablePermission; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; @@ -246,7 +253,7 @@ public final class SnapshotDescriptionUtils { * {@link 
SnapshotDescription}. */ public static SnapshotDescription validate(SnapshotDescription snapshot, Configuration conf) - throws IllegalArgumentException { + throws IllegalArgumentException, IOException { if (!snapshot.hasTable()) { throw new IllegalArgumentException( "Descriptor doesn't apply to a table, so we can't build it."); @@ -262,6 +269,12 @@ public final class SnapshotDescriptionUtils { builder.setCreationTime(time); snapshot = builder.build(); } + + // set the acl to snapshot if security feature is enabled. + if (isSecurityAvailable(conf)) { + snapshot = writeAclToSnapshotDescription(snapshot, conf); + } + + return snapshot; } @@ -306,7 +319,7 @@ public final class SnapshotDescriptionUtils { } /** - * Read in the {@link org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription} stored for the snapshot in the passed directory + * Read in the {@link SnapshotDescription} stored for the snapshot in the passed directory * @param fs filesystem where the snapshot was taken * @param snapshotDir directory where the snapshot was stored * @return the stored snapshot description @@ -364,4 +377,32 @@ public final class SnapshotDescriptionUtils { if (!snapshot.hasOwner()) return false; return snapshot.getOwner().equals(user.getShortName()); } + + public static boolean isSecurityAvailable(Configuration conf) throws IOException { + Connection conn = ConnectionFactory.createConnection(conf); + try { + Admin admin = conn.getAdmin(); + try { + return admin.tableExists(AccessControlLists.ACL_TABLE_NAME); + } finally { + admin.close(); + } + } finally { + conn.close(); + } + } + + private static SnapshotDescription writeAclToSnapshotDescription( + final SnapshotDescription snapshot, final Configuration conf) throws IOException { + ListMultimap<String, TablePermission> perms = + User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, TablePermission>>() { + @Override + public ListMultimap<String, TablePermission> run() throws Exception { + return AccessControlLists.getTablePermissions(conf, + TableName.valueOf(snapshot.getTable())); + } 
+ }); + return snapshot.toBuilder().setUsersAndPermissions(ProtobufUtil.toUserTablePermissions(perms)) + .build(); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java index 53abcb2b31d..bd3d0c2a06f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; @@ -51,7 +52,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.WALLink; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.util.FSUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java index 8e5b21d597e..e4cac95836e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java @@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; -import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.HRegion; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java index a5afb919692..89d0ba22130 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java @@ -36,7 +36,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java index 02c2eeaea93..82ec2826a04 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java @@ -40,7 +40,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.HRegionInfo; -import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.ByteStringer; @@ -126,7 +126,7 @@ public final class SnapshotManifestV2 { } static List loadRegionManifests(final Configuration conf, - final Executor executor,final FileSystem fs, final Path snapshotDir, + final Executor executor, final FileSystem fs, final Path snapshotDir, final SnapshotDescription desc, final int manifestSizeLimit) throws IOException { FileStatus[] manifestFiles = FSUtils.listStatus(fs, snapshotDir, new PathFilter() { @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java index 56699d61365..0f09f066e4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java @@ -39,7 +39,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.io.HFileLink; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp index 6099a2181bb..f5a0d9e66b2 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp +++ 
b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp @@ -24,7 +24,7 @@ import="org.apache.hadoop.hbase.client.HConnectionManager" import="org.apache.hadoop.hbase.master.HMaster" import="org.apache.hadoop.hbase.snapshot.SnapshotInfo" - import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription" + import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription" import="org.apache.hadoop.util.StringUtils" import="org.apache.hadoop.hbase.TableName" import="org.apache.hadoop.hbase.HBaseConfiguration" %> diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp index dceed8ee1d8..289a72cb201 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp @@ -27,7 +27,7 @@ import="org.apache.hadoop.hbase.HBaseConfiguration" import="org.apache.hadoop.hbase.client.Admin" import="org.apache.hadoop.hbase.master.HMaster" - import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription" + import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription" import="org.apache.hadoop.hbase.snapshot.SnapshotInfo" import="org.apache.hadoop.hbase.TableName" import="org.apache.hadoop.util.StringUtils" %> diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index 9295d88e28a..85c3ddf8e3f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -34,10 +34,10 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseTestingUtility; import 
org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; import org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java new file mode 100644 index 00000000000..6e9f1dbf61d --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java @@ -0,0 +1,243 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.client; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.master.MasterCoprocessorHost; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlConstants; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.access.Permission; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.IOException; + +@Category(MediumTests.class) +public class TestSnapshotWithAcl extends SecureTestUtil { + + public TableName TEST_TABLE = TableName.valueOf("TestSnapshotWithAcl"); + + private static final int ROW_COUNT = 30000; + + private static byte[] TEST_FAMILY = Bytes.toBytes("f1"); + private static byte[] TEST_QUALIFIER = Bytes.toBytes("cq"); + private static byte[] TEST_ROW = Bytes.toBytes(0); + private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static Configuration conf; + private static HBaseAdmin admin = null; + + // user is table owner. will have all permissions on table + private static User USER_OWNER; + // user with rw permissions on column family. 
+ private static User USER_RW; + // user with read-only permissions + private static User USER_RO; + // user with none permissions + private static User USER_NONE; + + static class AccessReadAction implements AccessTestAction { + + private TableName tableName; + + public AccessReadAction(TableName tableName) { + this.tableName = tableName; + } + + @Override + public Object run() throws Exception { + Get g = new Get(TEST_ROW); + g.addFamily(TEST_FAMILY); + HTable t = new HTable(conf, tableName); + try { + t.get(g); + } finally { + t.close(); + } + return null; + } + }; + + static class AccessWriteAction implements AccessTestAction { + private TableName tableName; + + public AccessWriteAction(TableName tableName) { + this.tableName = tableName; + } + + @Override + public Object run() throws Exception { + Put p = new Put(TEST_ROW); + p.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1)); + HTable t = new HTable(conf, tableName); + try { + t.put(p); + } finally { + t.close(); + } + return null; + } + } + + @BeforeClass + public static void setupBeforeClass() throws Exception { + conf = TEST_UTIL.getConfiguration(); + // Enable security + enableSecurity(conf); + conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName()); + // Verify enableSecurity sets up what we require + verifyConfiguration(conf); + // Enable EXEC permission checking + conf.setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true); + TEST_UTIL.startMiniCluster(); + MasterCoprocessorHost cpHost = + TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost(); + cpHost.load(AccessController.class, Coprocessor.PRIORITY_HIGHEST, conf); + + USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]); + USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]); + USER_RO = User.createUserForTesting(conf, "rouser", new String[0]); + USER_NONE = User.createUserForTesting(conf, "usernone", new String[0]); + } + + @Before + public void setUp() 
throws Exception { + admin = TEST_UTIL.getHBaseAdmin(); + HTableDescriptor htd = new HTableDescriptor(TEST_TABLE); + HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY); + hcd.setMaxVersions(100); + htd.addFamily(hcd); + htd.setOwner(USER_OWNER); + admin.createTable(htd, new byte[][] { Bytes.toBytes("s") }); + TEST_UTIL.waitTableEnabled(TEST_TABLE); + + grantOnTable(TEST_UTIL, USER_RW.getShortName(), TEST_TABLE, TEST_FAMILY, null, + Permission.Action.READ, Permission.Action.WRITE); + + grantOnTable(TEST_UTIL, USER_RO.getShortName(), TEST_TABLE, TEST_FAMILY, null, + Permission.Action.READ); + } + + private void loadData() throws IOException { + HTable hTable = new HTable(conf, TEST_TABLE); + try { + for (int i = 0; i < ROW_COUNT; i++) { + Put put = new Put(Bytes.toBytes(i)); + put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i)); + hTable.put(put); + } + hTable.flushCommits(); + } finally { + hTable.close(); + } + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + private void verifyRows(TableName tableName) throws IOException { + HTable hTable = new HTable(conf, tableName); + try { + Scan scan = new Scan(); + ResultScanner scanner = hTable.getScanner(scan); + Result result; + int rowCount = 0; + while ((result = scanner.next()) != null) { + byte[] value = result.getValue(TEST_FAMILY, TEST_QUALIFIER); + Assert.assertArrayEquals(value, Bytes.toBytes(rowCount++)); + } + Assert.assertEquals(rowCount, ROW_COUNT); + } finally { + hTable.close(); + } + } + + @Test + public void testRestoreSnapshot() throws Exception { + verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RO, USER_RW); + verifyDenied(new AccessReadAction(TEST_TABLE), USER_NONE); + verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE); + + loadData(); + verifyRows(TEST_TABLE); + + String snapshotName1 = "testSnapshot1"; + 
admin.snapshot(snapshotName1, TEST_TABLE); + + // clone snapshot with restoreAcl true. + TableName tableName1 = TableName.valueOf("tableName1"); + admin.cloneSnapshot(snapshotName1, tableName1, true); + verifyRows(tableName1); + verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW); + verifyDenied(new AccessReadAction(tableName1), USER_NONE); + verifyAllowed(new AccessWriteAction(tableName1), USER_OWNER, USER_RW); + verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE); + + // clone snapshot with restoreAcl false. + TableName tableName2 = TableName.valueOf("tableName2"); + admin.cloneSnapshot(snapshotName1, tableName2, false); + verifyRows(tableName2); + verifyAllowed(new AccessReadAction(tableName2), USER_OWNER); + verifyDenied(new AccessReadAction(tableName2), USER_NONE, USER_RO, USER_RW); + verifyAllowed(new AccessWriteAction(tableName2), USER_OWNER); + verifyDenied(new AccessWriteAction(tableName2), USER_RO, USER_RW, USER_NONE); + + // remove read permission for USER_RO. + revokeFromTable(TEST_UTIL, USER_RO.getShortName(), TEST_TABLE, TEST_FAMILY, null, + Permission.Action.READ); + verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessReadAction(TEST_TABLE), USER_RO, USER_NONE); + verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE); + + // restore snapshot with restoreAcl false. + admin.disableTable(TEST_TABLE); + admin.restoreSnapshot(snapshotName1, false, false); + admin.enableTable(TEST_TABLE); + verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessReadAction(TEST_TABLE), USER_RO, USER_NONE); + verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE); + + // restore snapshot with restoreAcl true. 
+ admin.disableTable(TEST_TABLE); + admin.restoreSnapshot(snapshotName1, false, true); + admin.enableTable(TEST_TABLE); + verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RO, USER_RW); + verifyDenied(new AccessReadAction(TEST_TABLE), USER_NONE); + verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW); + verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java index 54745f53c0e..0851267c4cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java @@ -59,10 +59,10 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 2dacd445036..4514ae2b5e3 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -41,7 +42,6 @@ import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.snapshot.DisabledTableSnapshotHandler; import org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index 1907a570448..e067122455f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos; import org.apache.hadoop.hbase.testclassification.MediumTests; -import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index e208fae8906..2c18a824dc2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -168,7 +168,7 @@ public class SecureTestUtil { * To indicate the action was not allowed, either throw an AccessDeniedException * or return an empty list of KeyValues. */ - static interface AccessTestAction extends PrivilegedExceptionAction { } + public static interface AccessTestAction extends PrivilegedExceptionAction { } /** This fails only in case of ADE or empty list for any of the actions. */ public static void verifyAllowed(User user, AccessTestAction... 
actions) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 99112d39561..51aeff8a312 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ProcedureInfo; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -108,7 +109,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java index e64c6c8c37e..23e5f3faf00 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java 
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index 1e67718ce95..403fffa1472 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; @@ -263,8 +263,8 @@ public final class SnapshotTestingUtils { * @param sleep: amount to sleep between checks to see if the snapshot is done * @throws ServiceException if the snapshot fails */ - public static void waitForSnapshotToComplete(HMaster master, - SnapshotDescription snapshot, long sleep) throws 
ServiceException { + public static void waitForSnapshotToComplete(HMaster master, SnapshotDescription snapshot, + long sleep) throws ServiceException { final IsSnapshotDoneRequest request = IsSnapshotDoneRequest.newBuilder() .setSnapshot(snapshot).build(); IsSnapshotDoneResponse done = IsSnapshotDoneResponse.newBuilder() diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java index a3f5382c5b2..c2139d963bd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java index 667c0156370..9289a020d8f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.master.HMaster; import 
org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java index 37f3c4c7cd4..442ac0dd7fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java @@ -23,12 +23,12 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java index cd5a5fb4300..8746ea3e39f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java @@ -31,11 +31,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.monitoring.MonitoredTask; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock; import org.apache.hadoop.hbase.util.FSTableDescriptors; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java index 8ebeb97ed5e..01452ebaab1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.TestTableName; import org.junit.After; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java index 7827aadb745..68377de876b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.snapshot; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; @@ -30,8 +29,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.junit.After; import org.junit.BeforeClass; @@ -77,6 +76,8 @@ public class TestSnapshotDescriptionUtils { fail("Snapshot was considered valid without a table name"); } catch (IllegalArgumentException e) { LOG.debug("Correctly failed when snapshot doesn't have a tablename"); + } catch (IOException e) { + LOG.debug("Correctly failed when saving acl into snapshot"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java index 89337ba7d75..461ea98b9c8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.snapshot; -import 
com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -28,8 +27,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest; +import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; diff --git a/hbase-shell/src/main/ruby/hbase.rb b/hbase-shell/src/main/ruby/hbase.rb index cb0519462fb..88a6f047fc7 100644 --- a/hbase-shell/src/main/ruby/hbase.rb +++ b/hbase-shell/src/main/ruby/hbase.rb @@ -87,6 +87,7 @@ module HBaseConstants DATA = 'DATA' SERVER_NAME = 'SERVER_NAME' LOCALITY_THRESHOLD = 'LOCALITY_THRESHOLD' + RESTORE_ACL = 'RESTORE_ACL' # Load constants from hbase java API def self.promote_constants(constants) diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb index 9925df3e7df..41f53dcff27 100644 --- a/hbase-shell/src/main/ruby/hbase/admin.rb +++ b/hbase-shell/src/main/ruby/hbase/admin.rb @@ -24,7 +24,7 @@ java_import org.apache.hadoop.hbase.util.RegionSplitter java_import org.apache.hadoop.hbase.util.Bytes java_import org.apache.hadoop.hbase.ServerName java_import org.apache.hadoop.hbase.TableName -java_import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos::SnapshotDescription +java_import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos::SnapshotDescription # Wrapper for org.apache.hadoop.hbase.client.HBaseAdmin @@ -907,14 +907,16 @@ 
module Hbase #---------------------------------------------------------------------------------------------- # Restore specified snapshot - def restore_snapshot(snapshot_name) - @admin.restoreSnapshot(snapshot_name.to_java_bytes) + def restore_snapshot(snapshot_name, restore_acl = false) + conf = @connection.getConfiguration + take_fail_safe_snapshot = conf.getBoolean("hbase.snapshot.restore.take.failsafe.snapshot", false) + @admin.restoreSnapshot(snapshot_name, take_fail_safe_snapshot, restore_acl) end #---------------------------------------------------------------------------------------------- # Create a new table by cloning the snapshot content - def clone_snapshot(snapshot_name, table) - @admin.cloneSnapshot(snapshot_name.to_java_bytes, table.to_java_bytes) + def clone_snapshot(snapshot_name, table, restore_acl = false) + @admin.cloneSnapshot(snapshot_name, org.apache.hadoop.hbase::TableName.valueOf(table), restore_acl) end #---------------------------------------------------------------------------------------------- diff --git a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb index 0498c8e3e4f..da82dd2a661 100644 --- a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb +++ b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb @@ -28,12 +28,19 @@ And writing on the newly created table will not influence the snapshot data. Examples: hbase> clone_snapshot 'snapshotName', 'tableName' hbase> clone_snapshot 'snapshotName', 'namespace:tableName' + +Following command will restore all acl from origin snapshot table into the +newly created table. 
+ + hbase> clone_snapshot 'snapshotName', 'namespace:tableName', {RESTORE_ACL=>true} EOF end - def command(snapshot_name, table) + def command(snapshot_name, table, args = {}) format_simple_command do - admin.clone_snapshot(snapshot_name, table) + raise(ArgumentError, "Arguments should be a Hash") unless args.kind_of?(Hash) + restore_acl = args.delete(RESTORE_ACL) || false + admin.clone_snapshot(snapshot_name, table, restore_acl) end end diff --git a/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb b/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb index 4d531711bca..54931cf753a 100644 --- a/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb +++ b/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb @@ -28,12 +28,18 @@ The table must be disabled. Examples: hbase> restore_snapshot 'snapshotName' + +Following command will restore all acl from snapshot into the table. + + hbase> restore_snapshot 'snapshotName', {RESTORE_ACL=>true} EOF end - def command(snapshot_name) + def command(snapshot_name, args = {}) format_simple_command do - admin.restore_snapshot(snapshot_name) + raise(ArgumentError, "Arguments should be a Hash") unless args.kind_of?(Hash) + restore_acl = args.delete(RESTORE_ACL) || false + admin.restore_snapshot(snapshot_name, restore_acl) end end end