HBASE-17149 Procedure V2 - Fix nonce submission to avoid unnecessary calling coprocessor multiple times; ADDENDUM by Stephen Yuan Jiang

Committed by Michael Stack, 2016-12-28 11:10:07 -08:00
parent 79e5efd35c
commit a3e0e0df0d
10 changed files with 10 additions and 37 deletions
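
This addendum is a cleanup pass on HBASE-17149: it drops imports and test plumbing that became unused once nonce handling moved into the procedure submission path, re-wraps a few over-long comments, and fixes two copy-pasted getDescription() strings. The sketch below is a minimal, hypothetical illustration of the deduplication idea named in the commit subject, not the actual HBase code: a client retry carrying the same nonce gets back the procedure id assigned on the first attempt instead of re-running the coprocessor pre-hook and submitting a second procedure. NonceDedupSketch, NonceKey, and submitOnce are invented names for this example.

// A minimal, self-contained sketch of the nonce-deduplication idea (hypothetical names,
// not the HBase API): the coprocessor pre-hook and the procedure submission run at most
// once per nonce; a retried call with the same nonce just gets the original procedure id.
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

public class NonceDedupSketch {

  /** Client-supplied retry key; every retry of one logical call reuses the same pair. */
  record NonceKey(long group, long nonce) {}

  private final ConcurrentHashMap<NonceKey, Long> nonceToProcId = new ConcurrentHashMap<>();
  private final AtomicLong procIdSource = new AtomicLong();

  /** Runs the hook and the submission only for the first call seen for this nonce. */
  public long submitOnce(NonceKey key, Runnable preCoprocessorHook, Runnable submitProcedure) {
    // computeIfAbsent evaluates the lambda at most once per key; concurrent retries
    // block until it finishes and then observe the already-assigned procedure id.
    return nonceToProcId.computeIfAbsent(key, k -> {
      preCoprocessorHook.run();     // e.g. a preCreateNamespace-style hook: fires exactly once
      submitProcedure.run();        // actual submission to the (imaginary) executor
      return procIdSource.incrementAndGet();
    });
  }

  public static void main(String[] args) {
    NonceDedupSketch master = new NonceDedupSketch();
    NonceKey key = new NonceKey(1L, 42L);
    Runnable hook = () -> System.out.println("pre-hook invoked");
    long first = master.submitOnce(key, hook, () -> {});
    long retry = master.submitOnce(key, hook, () -> {});
    System.out.println(first == retry);   // true: one hook invocation, one procedure
  }
}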

@@ -142,9 +142,6 @@ import org.apache.hadoop.hbase.regionserver.compactions.FIFOCompactionPolicy;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationFactory;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeers;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
import org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl;
import org.apache.hadoop.hbase.replication.master.TableCFsUpdater;
import org.apache.hadoop.hbase.replication.regionserver.Replication;
@@ -162,7 +159,6 @@ import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
@@ -2108,7 +2104,8 @@ public class HMaster extends HRegionServer implements MasterServices {
LOG.info(getClientIdAuditPrefix() + " modify " + descriptor);
// Execute the operation synchronously - wait for the operation to complete before continuing.
// Execute the operation synchronously - wait for the operation to complete before
// continuing.
ProcedurePrepareLatch latch = ProcedurePrepareLatch.createLatch(2, 0);
submitProcedure(new ModifyColumnFamilyProcedure(procedureExecutor.getEnvironment(),
tableName, descriptor, latch));
@@ -2750,14 +2747,15 @@ public class HMaster extends HRegionServer implements MasterServices {
throw new BypassCoprocessorException();
}
LOG.info(getClientIdAuditPrefix() + " creating " + namespaceDescriptor);
// Execute the operation synchronously - wait for the operation to complete before continuing.
// Execute the operation synchronously - wait for the operation to complete before
// continuing.
setProcId(getClusterSchema().createNamespace(namespaceDescriptor, getNonceKey()));
getMaster().getMasterCoprocessorHost().postCreateNamespace(namespaceDescriptor);
}
@Override
protected String getDescription() {
return "CreateTableProcedure";
return "CreateNamespaceProcedure";
}
});
}
@@ -2783,14 +2781,15 @@ public class HMaster extends HRegionServer implements MasterServices {
throw new BypassCoprocessorException();
}
LOG.info(getClientIdAuditPrefix() + " modify " + namespaceDescriptor);
// Execute the operation synchronously - wait for the operation to complete before continuing.
// Execute the operation synchronously - wait for the operation to complete before
// continuing.
setProcId(getClusterSchema().modifyNamespace(namespaceDescriptor, getNonceKey()));
getMaster().getMasterCoprocessorHost().postModifyNamespace(namespaceDescriptor);
}
@Override
protected String getDescription() {
return "CreateTableProcedure";
return "ModifyNamespaceProcedure";
}
});
}
@@ -2814,7 +2813,8 @@ public class HMaster extends HRegionServer implements MasterServices {
throw new BypassCoprocessorException();
}
LOG.info(getClientIdAuditPrefix() + " delete " + name);
// Execute the operation synchronously - wait for the operation to complete before continuing.
// Execute the operation synchronously - wait for the operation to complete before
// continuing.
setProcId(getClusterSchema().deleteNamespace(name, getNonceKey()));
getMaster().getMasterCoprocessorHost().postDeleteNamespace(name);
}
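
The re-wrapped comments above ("Execute the operation synchronously - wait for the operation to complete before continuing") describe the pattern visible in the ModifyColumnFamilyProcedure hunk: the master submits the procedure and then waits on a latch before returning to the caller. The following is a stripped-down, hypothetical sketch of that submit-then-wait pattern using a plain CountDownLatch; PrepareLatch and the worker thread are stand-ins for the real procedure machinery, not the HBase classes.

// Hypothetical, self-contained sketch of the submit-then-wait pattern; the names here
// are invented for illustration and do not mirror ProcedurePrepareLatch's real API.
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class SubmitAndWaitSketch {

  /** Released by the asynchronous work once it has progressed far enough to report back. */
  static final class PrepareLatch {
    private final CountDownLatch latch = new CountDownLatch(1);
    void release() { latch.countDown(); }
    boolean await(long timeout, TimeUnit unit) throws InterruptedException {
      return latch.await(timeout, unit);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    PrepareLatch latch = new PrepareLatch();

    // The "procedure" runs asynchronously and releases the latch when its early phase is done.
    Thread procedure = new Thread(() -> {
      // ... validation / prepare work would happen here ...
      latch.release();
    });
    procedure.start();

    // The caller submits and then blocks, so it only continues once the work has checked in.
    boolean done = latch.await(30, TimeUnit.SECONDS);
    System.out.println("operation reported back before we continued: " + done);
    procedure.join();
  }
}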

@@ -22,10 +22,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableExistsException;
@@ -42,9 +39,6 @@ import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@@ -28,7 +28,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.NamespaceNotFoundException;

@@ -22,11 +22,7 @@ import static org.junit.Assert.assertTrue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;

@@ -28,7 +28,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;

@@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
@@ -51,8 +50,6 @@ public class TestMergeTableRegionsProcedure {
private static final Log LOG = LogFactory.getLog(TestMergeTableRegionsProcedure.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static long nonceGroup = HConstants.NO_NONCE;
private static long nonce = HConstants.NO_NONCE;
private static final int initialRegionCount = 4;
private final static byte[] FAMILY = Bytes.toBytes("FAMILY");
@@ -87,9 +84,6 @@ public class TestMergeTableRegionsProcedure {
@Before
public void setup() throws Exception {
resetProcExecutorTestingKillFlag();
nonceGroup =
MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster());
nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster());
// Turn off balancer so it doesn't cut in and mess up our placements.
UTIL.getHBaseAdmin().setBalancerRunning(false, true);
// Turn off the meta scanner so it don't remove parent on us.

@@ -25,7 +25,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;

@@ -23,7 +23,6 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ProcedureInfo;

@@ -35,9 +35,6 @@ public abstract class TestTableDDLProcedureBase {
private static final Log LOG = LogFactory.getLog(TestTableDDLProcedureBase.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected static long nonceGroup;
protected static long nonce;
private static void setupConf(Configuration conf) {
conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
}
@@ -60,9 +57,6 @@ public abstract class TestTableDDLProcedureBase {
@Before
public void setup() throws Exception {
resetProcExecutorTestingKillFlag();
nonceGroup =
MasterProcedureTestingUtility.generateNonceGroup(UTIL.getHBaseCluster().getMaster());
nonce = MasterProcedureTestingUtility.generateNonce(UTIL.getHBaseCluster().getMaster());
}
@After

@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;