HBASE-16528 Procedure-V2: ServerCrashProcedure misses owner information (Stephen Yuan Jiang)
parent 45af3831fe
commit 605af95132
ServerManager.java
@@ -52,8 +52,10 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedException;
 import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.ResponseConverter;
@@ -620,8 +622,9 @@ public class ServerManager {
     }
 
     boolean carryingMeta = master.getAssignmentManager().isCarryingMeta(serverName);
-    this.master.getMasterProcedureExecutor().
-      submitProcedure(new ServerCrashProcedure(serverName, true, carryingMeta));
+    ProcedureExecutor<MasterProcedureEnv> procExec = this.master.getMasterProcedureExecutor();
+    procExec.submitProcedure(new ServerCrashProcedure(
+      procExec.getEnvironment(), serverName, true, carryingMeta));
     LOG.debug("Added=" + serverName +
       " to dead servers, submitted shutdown handler to be executed meta=" + carryingMeta);
 
@@ -664,8 +667,9 @@ public class ServerManager {
     }
 
     this.deadservers.add(serverName);
-    this.master.getMasterProcedureExecutor().
-      submitProcedure(new ServerCrashProcedure(serverName, shouldSplitWal, false));
+    ProcedureExecutor<MasterProcedureEnv> procExec = this.master.getMasterProcedureExecutor();
+    procExec.submitProcedure(new ServerCrashProcedure(
+      procExec.getEnvironment(), serverName, shouldSplitWal, false));
   }
 
   /**
ServerCrashProcedure.java
@@ -145,12 +145,15 @@ implements ServerProcedureInterface {
    * @param shouldSplitWal True if we should split WALs as part of crashed server processing.
    * @param carryingMeta True if carrying hbase:meta table region.
    */
-  public ServerCrashProcedure(final ServerName serverName,
-      final boolean shouldSplitWal, final boolean carryingMeta) {
+  public ServerCrashProcedure(
+      final MasterProcedureEnv env,
+      final ServerName serverName,
+      final boolean shouldSplitWal,
+      final boolean carryingMeta) {
     this.serverName = serverName;
     this.shouldSplitWal = shouldSplitWal;
     this.carryingMeta = carryingMeta;
-    // Currently not used.
+    this.setOwner(env.getRequestUser().getShortName());
   }
 
   /**
TestDeadServer.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -99,8 +100,9 @@ public class TestDeadServer {
   @Test(timeout = 15000)
   public void testCrashProcedureReplay() {
     HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
-    ProcedureExecutor pExecutor = master.getMasterProcedureExecutor();
-    ServerCrashProcedure proc = new ServerCrashProcedure(hostname123, false, false);
+    final ProcedureExecutor<MasterProcedureEnv> pExecutor = master.getMasterProcedureExecutor();
+    ServerCrashProcedure proc = new ServerCrashProcedure(
+      pExecutor.getEnvironment(), hostname123, false, false);
 
     ProcedureTestingUtility.submitAndWait(pExecutor, proc);
 
TestMasterProcedureEvents.java
@@ -145,7 +145,7 @@ public class TestMasterProcedureEvents {
     master.getServerManager().moveFromOnelineToDeadServers(hrs.getServerName());
 
     long procId = procExec.submitProcedure(
-      new ServerCrashProcedure(hrs.getServerName(), true, carryingMeta));
+      new ServerCrashProcedure(procExec.getEnvironment(), hrs.getServerName(), true, carryingMeta));
 
     for (int i = 0; i < 10; ++i) {
       Thread.sleep(100);
TestServerCrashProcedure.java
@@ -118,7 +118,8 @@ public class TestServerCrashProcedure {
     ProcedureTestingUtility.waitNoProcedureRunning(procExec);
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
     long procId =
-      procExec.submitProcedure(new ServerCrashProcedure(hrs.getServerName(), true, carryingMeta));
+      procExec.submitProcedure(new ServerCrashProcedure(
+        procExec.getEnvironment(), hrs.getServerName(), true, carryingMeta));
     // Now run through the procedure twice crashing the executor on each step...
     MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId);
     // Assert all data came back.
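Usage sketch (not part of the commit): with this change, callers pass the executor's MasterProcedureEnv into the ServerCrashProcedure constructor so the procedure records the requesting user as its owner via setOwner(env.getRequestUser().getShortName()). The snippet below assumes a caller like ServerManager with master, serverName, and carryingMeta already in scope.

// Sketch only: submit a ServerCrashProcedure so it carries owner information.
// `master`, `serverName`, and `carryingMeta` are assumed to exist in the caller.
ProcedureExecutor<MasterProcedureEnv> procExec = master.getMasterProcedureExecutor();
ServerCrashProcedure scp = new ServerCrashProcedure(
    procExec.getEnvironment(),   // environment supplies the request user for setOwner(...)
    serverName,                  // the crashed region server
    true,                        // shouldSplitWal: split this server's WALs during recovery
    carryingMeta);               // true if the server was carrying hbase:meta
long procId = procExec.submitProcedure(scp);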