HBASE-16769 Deprecate/remove PB references from MasterObserver and RegionServerObserver

anoopsamjohn 2017-09-26 08:52:23 +05:30
parent 4b208eb2b6
commit 3c7ab81072
14 changed files with 73 additions and 83 deletions
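In short: the MasterObserver and RegionServerObserver hooks stop exposing shaded protobuf types. Snapshot hooks now take the POJO org.apache.hadoop.hbase.client.SnapshotDescription instead of SnapshotProtos.SnapshotDescription, and the replication hooks lose their List&lt;WALEntry&gt;/CellScanner parameters. A minimal sketch (not part of this commit) of what a custom MasterObserver looks like against the new surface, assuming the 2.0 default-method signature implied by the hunks below:

import java.io.IOException;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class SnapshotAuditObserver implements MasterObserver {
  @Override
  public void preSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
      SnapshotDescription snapshot, TableDescriptor htd) throws IOException {
    // Plain POJO getters replace the generated protobuf accessors; no PB types
    // leak into custom coprocessor code anymore.
    System.out.println("taking snapshot " + snapshot.getName());
  }
}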

View File

@@ -34,12 +34,12 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Before;

View File

@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
@@ -66,7 +67,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RSGroupAdmi
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupResponse;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private

View File

@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.master.locking.LockProcedure;
@@ -44,7 +45,6 @@ import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;

View File

@@ -19,15 +19,12 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
-import java.util.List;
 
-import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
-import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 
 /**
  * Defines coprocessor hooks for interacting with operations on the
@@ -91,24 +88,28 @@ public interface RegionServerObserver extends Coprocessor {
     return endpoint;
   }
 
+  // TODO remove below 2 hooks when we implement AC as a core impl rather than a CP impl.
   /**
    * This will be called before executing the replication request to ship log entries.
    * @param ctx the environment to interact with the framework and region server.
-   * @param entries list of WALEntries to replicate
-   * @param cells Cells that the WALEntries refer to (if cells is non-null)
+   * @deprecated As of release 2.0.0 without any replacement. This is maintained for internal
+   * usage by AccessController. Do not use these hooks in custom co-processors.
    */
-  default void preReplicateLogEntries(final ObserverContext<RegionServerCoprocessorEnvironment> ctx,
-      List<WALEntry> entries, CellScanner cells) throws IOException {}
+  @Deprecated
+  default void preReplicateLogEntries(final ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {
+  }
 
   /**
    * This will be called after executing the replication request to ship log entries.
    * @param ctx the environment to interact with the framework and region server.
-   * @param entries list of WALEntries to replicate
-   * @param cells Cells that the WALEntries refer to (if cells is non-null)
+   * @deprecated As of release 2.0.0 without any replacement. This is maintained for internal
+   * usage by AccessController. Do not use these hooks in custom co-processors.
    */
+  @Deprecated
   default void postReplicateLogEntries(
-      final ObserverContext<RegionServerCoprocessorEnvironment> ctx,
-      List<WALEntry> entries, CellScanner cells) throws IOException {}
+      final ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
+  }
 
   /**
   * This will be called before clearing compaction queues
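The two replication hooks survive only for AccessController and no longer carry the WAL payload. A minimal sketch (not from this patch) of a custom RegionServerObserver overriding the new, deprecated shape; since the WALEntry list and CellScanner are gone, only the fact that a replication request is being served can be observed. The system property checked here is hypothetical:

import java.io.IOException;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;

public class ReplicationGateObserver implements RegionServerObserver {
  @Override
  public void preReplicateLogEntries(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
      throws IOException {
    // Hypothetical gate: refuse replication traffic while an operator flag is set.
    if (Boolean.getBoolean("example.replication.drain")) {
      throw new IOException("replication temporarily refused");
    }
  }
}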

View File

@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
@@ -55,7 +56,6 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**

View File

@@ -199,16 +199,18 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
    * @throws IOException File system exception
    */
   public List<SnapshotDescription> getCompletedSnapshots() throws IOException {
-    return getCompletedSnapshots(SnapshotDescriptionUtils.getSnapshotsDir(rootDir));
+    return getCompletedSnapshots(SnapshotDescriptionUtils.getSnapshotsDir(rootDir), true);
   }
 
   /**
    * Gets the list of all completed snapshots.
    * @param snapshotDir snapshot directory
+   * @param withCpCall Whether to call CP hooks
    * @return list of SnapshotDescriptions
    * @throws IOException File system exception
    */
-  private List<SnapshotDescription> getCompletedSnapshots(Path snapshotDir) throws IOException {
+  private List<SnapshotDescription> getCompletedSnapshots(Path snapshotDir, boolean withCpCall)
+      throws IOException {
     List<SnapshotDescription> snapshotDescs = new ArrayList<>();
     // first create the snapshot root path and check to see if it exists
     FileSystem fs = master.getMasterFileSystem().getFileSystem();
@@ -223,6 +225,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     FileStatus[] snapshots = fs.listStatus(snapshotDir,
         new SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter(fs));
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
+    withCpCall = withCpCall && cpHost != null;
     // loop through all the completed snapshots
     for (FileStatus snapshot : snapshots) {
       Path info = new Path(snapshot.getPath(), SnapshotDescriptionUtils.SNAPSHOTINFO_FILE);
@@ -235,9 +238,11 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
       try {
         in = fs.open(info);
         SnapshotDescription desc = SnapshotDescription.parseFrom(in);
-        if (cpHost != null) {
+        org.apache.hadoop.hbase.client.SnapshotDescription descPOJO = (withCpCall)
+            ? ProtobufUtil.createSnapshotDesc(desc) : null;
+        if (withCpCall) {
           try {
-            cpHost.preListSnapshot(desc);
+            cpHost.preListSnapshot(descPOJO);
           } catch (AccessDeniedException e) {
             LOG.warn("Current user does not have access to " + desc.getName() + " snapshot. "
                 + "Either you should be owner of this snapshot or admin user.");
@@ -248,8 +253,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
         snapshotDescs.add(desc);
 
         // call coproc post hook
-        if (cpHost != null) {
-          cpHost.postListSnapshot(desc);
+        if (withCpCall) {
+          cpHost.postListSnapshot(descPOJO);
         }
       } catch (IOException e) {
         LOG.warn("Found a corrupted snapshot " + snapshot.getPath(), e);
@@ -300,8 +305,10 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // call coproc pre hook
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotPOJO = null;
     if (cpHost != null) {
-      cpHost.preDeleteSnapshot(snapshot);
+      snapshotPOJO = ProtobufUtil.createSnapshotDesc(snapshot);
+      cpHost.preDeleteSnapshot(snapshotPOJO);
     }
 
     LOG.debug("Deleting snapshot: " + snapshotName);
@@ -312,7 +319,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // call coproc post hook
     if (cpHost != null) {
-      cpHost.postDeleteSnapshot(snapshot);
+      cpHost.postDeleteSnapshot(snapshotPOJO);
     }
   }
@@ -587,8 +594,10 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // call pre coproc hook
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotPOJO = null;
     if (cpHost != null) {
-      cpHost.preSnapshot(snapshot, desc);
+      snapshotPOJO = ProtobufUtil.createSnapshotDesc(snapshot);
+      cpHost.preSnapshot(snapshotPOJO, desc);
     }
 
     // if the table is enabled, then have the RS run actually the snapshot work
@@ -616,7 +625,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // call post coproc hook
     if (cpHost != null) {
-      cpHost.postSnapshot(snapshot, desc);
+      cpHost.postSnapshot(snapshotPOJO, desc);
     }
   }
@@ -684,8 +693,10 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
       final NonceKey nonceKey, final boolean restoreAcl) throws IOException {
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
     TableDescriptor htd = TableDescriptorBuilder.copy(tableName, snapshotTableDesc);
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotPOJO = null;
     if (cpHost != null) {
-      cpHost.preCloneSnapshot(reqSnapshot, htd);
+      snapshotPOJO = ProtobufUtil.createSnapshotDesc(reqSnapshot);
+      cpHost.preCloneSnapshot(snapshotPOJO, htd);
     }
     long procId;
     try {
@@ -698,7 +709,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     LOG.info("Clone snapshot=" + snapshot.getName() + " as table=" + tableName);
 
     if (cpHost != null) {
-      cpHost.postCloneSnapshot(reqSnapshot, htd);
+      cpHost.postCloneSnapshot(snapshotPOJO, htd);
     }
     return procId;
   }
@@ -812,8 +823,10 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     }
 
     // call Coprocessor pre hook
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotPOJO = null;
     if (cpHost != null) {
-      cpHost.preRestoreSnapshot(reqSnapshot, snapshotTableDesc);
+      snapshotPOJO = ProtobufUtil.createSnapshotDesc(reqSnapshot);
+      cpHost.preRestoreSnapshot(snapshotPOJO, snapshotTableDesc);
     }
 
     long procId;
@@ -827,7 +840,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     LOG.info("Restore snapshot=" + snapshot.getName() + " as table=" + tableName);
 
     if (cpHost != null) {
-      cpHost.postRestoreSnapshot(reqSnapshot, snapshotTableDesc);
+      cpHost.postRestoreSnapshot(snapshotPOJO, snapshotTableDesc);
     }
 
     return procId;
@@ -1045,7 +1058,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // check if an older version of snapshot directory was present
     Path oldSnapshotDir = new Path(mfs.getRootDir(), HConstants.OLD_SNAPSHOT_DIR_NAME);
     FileSystem fs = mfs.getFileSystem();
-    List<SnapshotDescription> ss = getCompletedSnapshots(new Path(rootDir, oldSnapshotDir));
+    List<SnapshotDescription> ss = getCompletedSnapshots(new Path(rootDir, oldSnapshotDir), false);
     if (ss != null && !ss.isEmpty()) {
       LOG.error("Snapshots from an earlier release were found under: " + oldSnapshotDir);
       LOG.error("Please rename the directory as " + HConstants.SNAPSHOT_DIR_NAME);

View File

@@ -2177,11 +2177,11 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
       requestCount.increment();
       List<WALEntry> entries = request.getEntryList();
       CellScanner cellScanner = ((HBaseRpcController)controller).cellScanner();
-      regionServer.getRegionServerCoprocessorHost().preReplicateLogEntries(entries, cellScanner);
+      regionServer.getRegionServerCoprocessorHost().preReplicateLogEntries();
       regionServer.replicationSinkHandler.replicateLogEntries(entries, cellScanner,
         request.getReplicationClusterId(), request.getSourceBaseNamespaceDirPath(),
         request.getSourceHFileArchiveDirPath());
-      regionServer.getRegionServerCoprocessorHost().postReplicateLogEntries(entries, cellScanner);
+      regionServer.getRegionServerCoprocessorHost().postReplicateLogEntries();
       return ReplicateWALEntryResponse.newBuilder().build();
     } else {
       throw new ServiceException("Replication services are not initialized yet");

View File

@@ -26,7 +26,6 @@ import org.apache.commons.lang3.ClassUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
@@ -115,24 +113,24 @@ public class RegionServerCoprocessorHost extends
     });
   }
 
-  public void preReplicateLogEntries(final List<WALEntry> entries, final CellScanner cells)
+  public void preReplicateLogEntries()
       throws IOException {
     execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
       @Override
       public void call(RegionServerObserver oserver,
          ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
-        oserver.preReplicateLogEntries(ctx, entries, cells);
+        oserver.preReplicateLogEntries(ctx);
       }
     });
   }
 
-  public void postReplicateLogEntries(final List<WALEntry> entries, final CellScanner cells)
+  public void postReplicateLogEntries()
      throws IOException {
     execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
       @Override
       public void call(RegionServerObserver oserver,
          ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
-        oserver.postReplicateLogEntries(ctx, entries, cells);
+        oserver.postReplicateLogEntries(ctx);
       }
     });
   }

View File

@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.client.Query;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
@@ -121,10 +122,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -2622,16 +2621,11 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
   }
 
   @Override
-  public void preReplicateLogEntries(ObserverContext<RegionServerCoprocessorEnvironment> ctx,
-      List<WALEntry> entries, CellScanner cells) throws IOException {
+  public void preReplicateLogEntries(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {
     requirePermission(getActiveUser(ctx), "replicateLogEntries", Action.WRITE);
   }
 
-  @Override
-  public void postReplicateLogEntries(ObserverContext<RegionServerCoprocessorEnvironment> ctx,
-      List<WALEntry> entries, CellScanner cells) throws IOException {
-  }
-
   @Override
   public void preClearCompactionQueues(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
       throws IOException {

View File

@@ -373,10 +373,10 @@ public final class SnapshotDescriptionUtils {
    * @return true if the user is the owner of the snapshot,
    *         false otherwise or the snapshot owner field is not present.
    */
-  public static boolean isSnapshotOwner(final SnapshotDescription snapshot, final User user) {
+  public static boolean isSnapshotOwner(org.apache.hadoop.hbase.client.SnapshotDescription snapshot,
+      User user) {
     if (user == null) return false;
-    if (!snapshot.hasOwner()) return false;
-    return snapshot.getOwner().equals(user.getShortName());
+    return user.getShortName().equals(snapshot.getOwner());
   }
 
   public static boolean isSecurityAvailable(Configuration conf) throws IOException {
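Note the reversed equals() above: the POJO has no hasOwner() like the protobuf message, so calling equals() on the user's short name keeps the check null-safe when no owner was recorded (getOwner() presumably returns null in that case). A small usage sketch, not part of the commit, using the constructor shapes visible in the TestAccessController and TestWithDisabledAuthorization hunks (name, table, type, owner):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;

public class OwnerCheckExample {
  public static void main(String[] args) {
    SnapshotDescription withOwner = new SnapshotDescription(
        "t1-snapshot", TableName.valueOf("t1"), null, "alice");
    SnapshotDescription noOwner = new SnapshotDescription("t1-snapshot");
    System.out.println("alice".equals(withOwner.getOwner())); // true
    System.out.println("alice".equals(noOwner.getOwner()));   // false: owner never set
  }
}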

View File

@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.master.HMaster;
@@ -66,7 +67,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;

View File

@@ -72,6 +72,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -136,7 +137,6 @@ import org.junit.rules.TestName;
 import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 /**
  * Performs authorization checks for common operations, according to different
@@ -2001,10 +2001,8 @@ public class TestAccessController extends SecureTestUtil {
   public void testSnapshot() throws Exception {
     Admin admin = TEST_UTIL.getAdmin();
     final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE);
-    SnapshotDescription.Builder builder = SnapshotDescription.newBuilder();
-    builder.setName(TEST_TABLE.getNameAsString() + "-snapshot");
-    builder.setTable(TEST_TABLE.getNameAsString());
-    final SnapshotDescription snapshot = builder.build();
+    final SnapshotDescription snapshot = new SnapshotDescription(
+        TEST_TABLE.getNameAsString() + "-snapshot", TEST_TABLE);
     AccessTestAction snapshotAction = new AccessTestAction() {
       @Override
       public Object run() throws Exception {
@@ -2062,11 +2060,9 @@ public class TestAccessController extends SecureTestUtil {
   public void testSnapshotWithOwner() throws Exception {
     Admin admin = TEST_UTIL.getAdmin();
     final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE);
-    SnapshotDescription.Builder builder = SnapshotDescription.newBuilder();
-    builder.setName(TEST_TABLE.getNameAsString() + "-snapshot");
-    builder.setTable(TEST_TABLE.getNameAsString());
-    builder.setOwner(USER_OWNER.getName());
-    final SnapshotDescription snapshot = builder.build();
+    final SnapshotDescription snapshot = new SnapshotDescription(
+        TEST_TABLE.getNameAsString() + "-snapshot", TEST_TABLE, null, USER_OWNER.getName());
     AccessTestAction snapshotAction = new AccessTestAction() {
       @Override
       public Object run() throws Exception {
@@ -2904,10 +2900,8 @@ public class TestAccessController extends SecureTestUtil {
     AccessTestAction replicateLogEntriesAction = new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        ACCESS_CONTROLLER.preReplicateLogEntries(ObserverContext.createAndPrepare(RSCP_ENV, null),
-            null, null);
-        ACCESS_CONTROLLER.postReplicateLogEntries(ObserverContext.createAndPrepare(RSCP_ENV, null),
-            null, null);
+        ACCESS_CONTROLLER.preReplicateLogEntries(ObserverContext.createAndPrepare(RSCP_ENV, null));
+        ACCESS_CONTROLLER.postReplicateLogEntries(ObserverContext.createAndPrepare(RSCP_ENV, null));
         return null;
       }
     };

View File

@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
@@ -65,7 +66,6 @@ import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -633,9 +633,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
     verifyAllowed(new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
-          .setName("foo")
-          .build();
+        SnapshotDescription snapshot = new SnapshotDescription("foo");
         HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName());
         ACCESS_CONTROLLER.preSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           snapshot, htd);
@@ -647,9 +645,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
     verifyAllowed(new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
-          .setName("foo")
-          .build();
+        SnapshotDescription snapshot = new SnapshotDescription("foo");
         ACCESS_CONTROLLER.preListSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           snapshot);
         return null;
@@ -660,9 +656,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
     verifyAllowed(new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
-          .setName("foo")
-          .build();
+        SnapshotDescription snapshot = new SnapshotDescription("foo");
         HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName());
         ACCESS_CONTROLLER.preCloneSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           snapshot, htd);
@@ -674,9 +668,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
     verifyAllowed(new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
-          .setName("foo")
-          .build();
+        SnapshotDescription snapshot = new SnapshotDescription("foo");
         HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName());
         ACCESS_CONTROLLER.preRestoreSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           snapshot, htd);
@@ -688,9 +680,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
     verifyAllowed(new AccessTestAction() {
       @Override
       public Object run() throws Exception {
-        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
-          .setName("foo")
-          .build();
+        SnapshotDescription snapshot = new SnapshotDescription("foo");
         ACCESS_CONTROLLER.preDeleteSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
           snapshot);
         return null;

View File

@@ -27,12 +27,12 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.TestTableName;
 import org.junit.After;