+ *
+ */
+@InterfaceAudience.Private
+public final class JRubyFormat {
+ private static final Escaper escaper;
+
+ static {
+ escaper = Escapers.builder()
+ .addEscape('\\', "\\\\")
+ .addEscape('\'', "\\'")
+ .addEscape('\n', "\\n")
+ .addEscape('\r', "\\r")
+ .addEscape('\t', "\\t")
+ .addEscape('\f', "\\f")
+ .build();
+ }
+
+ private JRubyFormat() {
+ }
+
+ private static String escape(Object object) {
+ if (object == null) {
+ return "";
+ } else {
+ return escaper.escape(object.toString());
+ }
+ }
+
+ @SuppressWarnings({ "unchecked" })
+ private static void appendJRuby(StringBuilder builder, Object object) {
+ if (object == null) {
+ builder.append("''");
+ } else if (object instanceof List) {
+ builder.append("[");
+
+ boolean first = true;
+
+ for (Object element: (List<Object>)object) {
+ if (first) {
+ first = false;
+ builder.append(" ");
+ } else {
+ builder.append(", ");
+ }
+
+ appendJRuby(builder, element);
+ }
+
+ if (!first) {
+ builder.append(" ");
+ }
+
+ builder.append("]");
+ } else if (object instanceof Map) {
+ builder.append("{");
+
+ boolean first = true;
+
+ for (Entry<String, Object> entry: ((Map<String, Object>)object).entrySet()) {
+ if (first) {
+ first = false;
+ builder.append(" ");
+ } else {
+ builder.append(", ");
+ }
+
+ String key = entry.getKey();
+ String escapedKey = escape(key);
+
+ if (key.equals(escapedKey)) {
+ builder.append(key);
+ } else {
+ builder.append("'").append(escapedKey).append("'");
+ }
+
+ builder.append(" => ");
+ appendJRuby(builder, entry.getValue());
+ }
+
+ if (!first) {
+ builder.append(" ");
+ }
+
+ builder.append("}");
+ } else if (object instanceof byte[]) {
+ String byteString = Bytes.toHex((byte[])object);
+ builder.append("'").append(escape(byteString)).append("'");
+ } else {
+ builder.append("'").append(escape(object)).append("'");
+ }
+ }
+
+ public static String print(Object object) {
+ StringBuilder builder = new StringBuilder();
+
+ appendJRuby(builder, object);
+
+ return builder.toString();
+ }
+}
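
For reference, a minimal caller shows the shell-friendly output print() produces; the map contents below are illustrative only and not part of the patch:

    // Hypothetical input; keys that survive escaping unchanged stay bare,
    // while every value is quoted after toString().
    Map<String, Object> attrs = new LinkedHashMap<>();
    attrs.put("NAME", "cf");
    attrs.put("VERSIONS", 3);
    String ruby = JRubyFormat.print(attrs);
    // ruby == "{ NAME => 'cf', VERSIONS => '3' }"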
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java
new file mode 100644
index 00000000000..96b3da08ca7
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestJRubyFormat.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestJRubyFormat {
+ @Test
+ public void testPrint() {
+ Map<String, Object> map = new LinkedHashMap<>();
+ map.put("null", null);
+ map.put("boolean", true);
+ map.put("number", 1);
+ map.put("string", "str");
+ map.put("binary", new byte[] { 1, 2, 3 });
+ map.put("list", Lists.newArrayList(1, "2", true));
+
+ String jrubyString = JRubyFormat.print(map);
+ assertEquals("{ null => '', boolean => 'true', number => '1', "
+ + "string => 'str', binary => '010203', "
+ + "list => [ '1', '2', 'true' ] }", jrubyString);
+ }
+
+ @Test
+ public void testEscape() {
+ String jrubyString = JRubyFormat.print("\\\'\n\r\t\f");
+ assertEquals("'\\\\\\'\\n\\r\\t\\f'", jrubyString);
+ }
+}
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockType.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockType.java
new file mode 100644
index 00000000000..e4d867d8f04
--- /dev/null
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockType.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.procedure2;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public enum LockType {
+ EXCLUSIVE, SHARED
+}
\ No newline at end of file
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResource.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResource.java
new file mode 100644
index 00000000000..e3320ab26f6
--- /dev/null
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResource.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.procedure2;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.LockedResourceType;
+
+@InterfaceAudience.Private
+public class LockedResource {
+ private final LockedResourceType resourceType;
+ private final String resourceName;
+ private final LockType lockType;
+ private final Procedure<?> exclusiveLockOwnerProcedure;
+ private final int sharedLockCount;
+ private final List<Procedure<?>> waitingProcedures;
+
+ public LockedResource(LockedResourceType resourceType, String resourceName,
+ LockType lockType, Procedure<?> exclusiveLockOwnerProcedure,
+ int sharedLockCount, List<Procedure<?>> waitingProcedures) {
+ this.resourceType = resourceType;
+ this.resourceName = resourceName;
+ this.lockType = lockType;
+ this.exclusiveLockOwnerProcedure = exclusiveLockOwnerProcedure;
+ this.sharedLockCount = sharedLockCount;
+ this.waitingProcedures = waitingProcedures;
+ }
+
+ public LockedResourceType getResourceType() {
+ return resourceType;
+ }
+
+ public String getResourceName() {
+ return resourceName;
+ }
+
+ public LockType getLockType() {
+ return lockType;
+ }
+
+ public Procedure<?> getExclusiveLockOwnerProcedure() {
+ return exclusiveLockOwnerProcedure;
+ }
+
+ public int getSharedLockCount() {
+ return sharedLockCount;
+ }
+
+ public List<Procedure<?>> getWaitingProcedures() {
+ return waitingProcedures;
+ }
+}
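
How the fields of this value object fit together can be seen in a short sketch; the resource name and procedure references here are hypothetical:

    // Hypothetical helper: describe an exclusive table lock and its waiters.
    static LockedResource describeTableLock(Procedure<?> owner,
        List<Procedure<?>> waiters) {
      return new LockedResource(
          LockedResourceType.TABLE,  // kind of resource being locked
          "ns:example_table",        // resource name (illustrative)
          LockType.EXCLUSIVE,        // held exclusively...
          owner,                     // ...by this procedure
          0,                         // hence no shared holders
          waiters);                  // procedures queued behind the lock
    }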
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
new file mode 100644
index 00000000000..29820f1d4c6
--- /dev/null
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.procedure2;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public enum LockedResourceType {
+ SERVER, NAMESPACE, TABLE, REGION
+}
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 335e83c2052..db488c960c5 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -165,17 +163,17 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
- * <p>If you need to hold the lock for the life of the Procdure -- i.e. you do not
+ * <p>If you need to hold the lock for the life of the Procedure -- i.e. you do not
* want any other Procedure interfering while this Procedure is running, see
* {@link #holdLock(Object)}.
*
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index d0052f64f34..9337530eeeb 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -22,8 +22,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@@ -241,7 +239,7 @@ public class ProcedureExecutor<TEnvironment> {
}
/**
- * Map the the procId returned by submitProcedure(), the Root-ProcID, to the ProcedureInfo.
+ * Map the procId returned by submitProcedure(), the Root-ProcID, to the Procedure.
* Once a Root-Procedure completes (success or failure), the result will be added to this map.
* The user of ProcedureExecutor should call getResult(procId) to get the result.
*/
@@ -750,14 +748,22 @@ public class ProcedureExecutor {
}
}
- private static class FailedProcedure extends Procedure {
+ public static class FailedProcedure extends Procedure {
private String procName;
- public FailedProcedure(NonceKey nonceKey, String procName, User owner,
- IOException exception) {
+ public FailedProcedure() {
+ }
+
+ public FailedProcedure(long procId, String procName, User owner,
+ NonceKey nonceKey, IOException exception) {
this.procName = procName;
- setNonceKey(nonceKey);
+ setProcId(procId);
+ setState(ProcedureState.ROLLEDBACK);
setOwner(owner);
+ setNonceKey(nonceKey);
+ long currentTime = EnvironmentEdgeManager.currentTime();
+ setSubmittedTime(currentTime);
+ setLastUpdate(currentTime);
setFailure(Objects.toString(exception.getMessage(), ""), exception);
}
@@ -785,11 +791,13 @@ public class ProcedureExecutor {
}
@Override
- protected void serializeStateData(OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
}
@@ -809,7 +817,9 @@ public class ProcedureExecutor {
final Long procId = nonceKeysToProcIdsMap.get(nonceKey);
if (procId == null || completed.containsKey(procId)) return;
- Procedure proc = new FailedProcedure(nonceKey, procName, procOwner, exception);
+ Procedure<?> proc = new FailedProcedure(procId.longValue(),
+ procName, procOwner, nonceKey, exception);
+
completed.putIfAbsent(procId, new CompletedProcedureRetainer(proc));
}
@@ -1045,15 +1055,17 @@ public class ProcedureExecutor {
}
/**
- * List procedures.
+ * Get procedures.
* @return the procedures in a list
*/
- public List<Procedure> listProcedures() {
- final List<Procedure> procedureLists = new ArrayList<>(procedures.size() + completed.size());
- procedureLists.addAll(procedures.values());
+ public List<Procedure<?>> getProcedures() {
+ final List<Procedure<?>> procedureLists = new ArrayList<>(procedures.size() + completed.size());
+ for (Procedure<?> procedure : procedures.values()) {
+ procedureLists.add(procedure);
+ }
// Note: The procedure could show up twice in the list with different state, as
// it could complete after we walk through procedures list and insert into
- // procedureList - it is ok, as we will use the information in the ProcedureInfo
+ // procedureList - it is ok, as we will use the information in the Procedure
// to figure it out; to prevent this would increase the complexity of the logic.
for (CompletedProcedureRetainer retainer: completed.values()) {
procedureLists.add(retainer.getProcedure());
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.java
index b148dae98a1..596ff21a74a 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.java
@@ -18,9 +18,7 @@
package org.apache.hadoop.hbase.procedure2;
-import java.io.InputStream;
-import java.io.OutputStream;
-
+import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -58,12 +56,12 @@ public abstract class ProcedureInMemoryChore<TEnvironment> extends Procedure<TEnvironment> {
- List<LockInfo> listLocks();
+ List<LockedResource> getLocks();
/**
- * @return {@link LockInfo} for resource of specified type & name. null if resource is not locked.
+ * @return {@link LockedResource} for resource of specified type & name. null if resource is not locked.
*/
- LockInfo getLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName);
+ LockedResource getLockResource(LockedResourceType resourceType, String resourceName);
+
/**
* Returns the number of elements in this queue.
* @return the number of elements in this queue.
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureStateSerializer.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureStateSerializer.java
new file mode 100644
index 00000000000..03842d923d7
--- /dev/null
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureStateSerializer.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+
+public interface ProcedureStateSerializer {
+ void serialize(Message message) throws IOException;
+
+ <M extends Message> M deserialize(Class<M> clazz) throws IOException;
+}
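
The intended usage pattern appears in the StateMachineProcedure change further down; condensed, a procedure with a single counter field might persist it like this sketch (CounterData stands in for a hypothetical generated message):

    @Override
    protected void serializeStateData(ProcedureStateSerializer serializer)
        throws IOException {
      // Each serialize() call appends one packed message to the procedure state.
      serializer.serialize(CounterData.newBuilder().setCount(count).build());
    }

    @Override
    protected void deserializeStateData(ProcedureStateSerializer serializer)
        throws IOException {
      // Messages come back in the order they were written.
      count = serializer.deserialize(CounterData.class).getCount();
    }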
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
index 3232f2b3bac..2381abd96df 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
@@ -18,20 +18,21 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
+import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
-
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.ProcedureState;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-import org.apache.hadoop.hbase.util.NonceKey;
-
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Any;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.util.NonceKey;
/**
* Helper to convert to/from ProcedureProtos
@@ -84,6 +85,69 @@ public final class ProcedureUtil {
// convert to and from Procedure object
// ==========================================================================
+ /**
+ * A serializer for our Procedures. Instead of the previous serializer, it
+ * uses the stateMessage list to store the internal state of the Procedures.
+ */
+ private static class StateSerializer implements ProcedureStateSerializer {
+ private final ProcedureProtos.Procedure.Builder builder;
+ private int deserializeIndex;
+
+ public StateSerializer(ProcedureProtos.Procedure.Builder builder) {
+ this.builder = builder;
+ }
+
+ @Override
+ public void serialize(Message message) throws IOException {
+ Any packedMessage = Any.pack(message);
+ builder.addStateMessage(packedMessage);
+ }
+
+ @Override
+ public <M extends Message> M deserialize(Class<M> clazz)
+ throws IOException {
+ if (deserializeIndex >= builder.getStateMessageCount()) {
+ throw new IOException("Invalid state message index: " + deserializeIndex);
+ }
+
+ try {
+ Any packedMessage = builder.getStateMessage(deserializeIndex++);
+ return packedMessage.unpack(clazz);
+ } catch (InvalidProtocolBufferException e) {
+ throw e.unwrapIOException();
+ }
+ }
+ }
+
+ /**
+ * A serializer (deserializer) for those Procedures which were serialized
+ * before this patch. It deserializes the old, binary stateData field.
+ */
+ private static class CompatStateSerializer implements ProcedureStateSerializer {
+ private InputStream inputStream;
+
+ public CompatStateSerializer(InputStream inputStream) {
+ this.inputStream = inputStream;
+ }
+
+ @Override
+ public void serialize(Message message) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <M extends Message> M deserialize(Class<M> clazz)
+ throws IOException {
+ Parser<M> parser = (Parser<M>) Internal.getDefaultInstance(clazz).getParserForType();
+ try {
+ return parser.parseDelimitedFrom(inputStream);
+ } catch (InvalidProtocolBufferException e) {
+ throw e.unwrapIOException();
+ }
+ }
+ }
+
/**
* Helper to convert the procedure to protobuf.
* Used by ProcedureStore implementations.
@@ -130,15 +194,8 @@ public final class ProcedureUtil {
builder.setResult(UnsafeByteOperations.unsafeWrap(result));
}
- final ByteString.Output stateStream = ByteString.newOutput();
- try {
- proc.serializeStateData(stateStream);
- if (stateStream.size() > 0) {
- builder.setStateData(stateStream.toByteString());
- }
- } finally {
- stateStream.close();
- }
+ ProcedureStateSerializer serializer = new StateSerializer(builder);
+ proc.serializeStateData(serializer);
if (proc.getNonceKey() != null) {
builder.setNonceGroup(proc.getNonceKey().getNonceGroup());
@@ -198,87 +255,62 @@ public final class ProcedureUtil {
proc.setNonceKey(new NonceKey(proto.getNonceGroup(), proto.getNonce()));
}
- // we want to call deserialize even when the stream is empty, mainly for testing.
- proc.deserializeStateData(proto.getStateData().newInput());
+ ProcedureStateSerializer serializer = null;
+
+ if (proto.getStateMessageCount() > 0) {
+ serializer = new StateSerializer(proto.toBuilder());
+ } else if (proto.hasStateData()) {
+ InputStream inputStream = proto.getStateData().newInput();
+ serializer = new CompatStateSerializer(inputStream);
+ }
+
+ if (serializer != null) {
+ proc.deserializeStateData(serializer);
+ }
return proc;
}
// ==========================================================================
- // convert to and from ProcedureInfo object
+ // convert from LockedResource object
// ==========================================================================
- /**
- * @return Convert the current {@link ProcedureInfo} into a Protocol Buffers Procedure
- * instance.
- */
- public static ProcedureProtos.Procedure convertToProtoProcedure(final ProcedureInfo procInfo) {
- final ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder();
+ public static LockServiceProtos.LockedResourceType convertToProtoResourceType(
+ LockedResourceType resourceType) {
+ return LockServiceProtos.LockedResourceType.valueOf(resourceType.name());
+ }
- builder.setClassName(procInfo.getProcName());
- builder.setProcId(procInfo.getProcId());
- builder.setSubmittedTime(procInfo.getSubmittedTime());
- builder.setState(ProcedureProtos.ProcedureState.valueOf(procInfo.getProcState().name()));
- builder.setLastUpdate(procInfo.getLastUpdate());
+ public static LockServiceProtos.LockType convertToProtoLockType(LockType lockType) {
+ return LockServiceProtos.LockType.valueOf(lockType.name());
+ }
- if (procInfo.hasParentId()) {
- builder.setParentId(procInfo.getParentId());
+ public static LockServiceProtos.LockedResource convertToProtoLockedResource(
+ LockedResource lockedResource) throws IOException
+ {
+ LockServiceProtos.LockedResource.Builder builder =
+ LockServiceProtos.LockedResource.newBuilder();
+
+ builder
+ .setResourceType(convertToProtoResourceType(lockedResource.getResourceType()))
+ .setResourceName(lockedResource.getResourceName())
+ .setLockType(convertToProtoLockType(lockedResource.getLockType()));
+
+ Procedure<?> exclusiveLockOwnerProcedure = lockedResource.getExclusiveLockOwnerProcedure();
+
+ if (exclusiveLockOwnerProcedure != null) {
+ ProcedureProtos.Procedure exclusiveLockOwnerProcedureProto =
+ convertToProtoProcedure(exclusiveLockOwnerProcedure);
+ builder.setExclusiveLockOwnerProcedure(exclusiveLockOwnerProcedureProto);
}
- if (procInfo.hasOwner()) {
- builder.setOwner(procInfo.getProcOwner());
- }
+ builder.setSharedLockCount(lockedResource.getSharedLockCount());
- if (procInfo.isFailed()) {
- builder.setException(ForeignExceptionUtil.toProtoForeignException(procInfo.getException()));
- }
-
- if (procInfo.hasResultData()) {
- builder.setResult(UnsafeByteOperations.unsafeWrap(procInfo.getResult()));
+ for (Procedure<?> waitingProcedure : lockedResource.getWaitingProcedures()) {
+ ProcedureProtos.Procedure waitingProcedureProto =
+ convertToProtoProcedure(waitingProcedure);
+ builder.addWaitingProcedures(waitingProcedureProto);
}
return builder.build();
}
-
- /**
- * Helper to convert the protobuf object.
- * @return Convert the current Protocol Buffers Procedure to {@link ProcedureInfo}
- * instance.
- */
- public static ProcedureInfo convertToProcedureInfo(final ProcedureProtos.Procedure procProto) {
- NonceKey nonceKey = null;
- if (procProto.getNonce() != HConstants.NO_NONCE) {
- nonceKey = new NonceKey(procProto.getNonceGroup(), procProto.getNonce());
- }
-
- return new ProcedureInfo(procProto.getProcId(), procProto.getClassName(),
- procProto.hasOwner() ? procProto.getOwner() : null,
- convertToProcedureState(procProto.getState()),
- procProto.hasParentId() ? procProto.getParentId() : -1, nonceKey,
- procProto.hasException() ?
- ForeignExceptionUtil.toIOException(procProto.getException()) : null,
- procProto.getLastUpdate(), procProto.getSubmittedTime(),
- procProto.hasResult() ? procProto.getResult().toByteArray() : null);
- }
-
- public static ProcedureState convertToProcedureState(ProcedureProtos.ProcedureState state) {
- return ProcedureState.valueOf(state.name());
- }
-
- public static ProcedureInfo convertToProcedureInfo(final Procedure proc) {
- return convertToProcedureInfo(proc, null);
- }
-
- /**
- * Helper to create the ProcedureInfo from Procedure.
- */
- public static ProcedureInfo convertToProcedureInfo(final Procedure proc,
- final NonceKey nonceKey) {
- final RemoteProcedureException exception = proc.hasException() ? proc.getException() : null;
- return new ProcedureInfo(proc.getProcId(), proc.toStringClass(), proc.getOwner(),
- convertToProcedureState(proc.getState()),
- proc.hasParent() ? proc.getParentProcId() : -1, nonceKey,
- exception != null ? exception.unwrapRemoteIOException() : null,
- proc.getLastUpdate(), proc.getSubmittedTime(), proc.getResult());
- }
}
\ No newline at end of file
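
StateSerializer above leans on protobuf's Any container; the round trip it depends on can be shown in isolation (shaded protobuf types, Int32Value chosen arbitrarily):

    // Any.pack() records a type URL plus the message bytes; unpack() verifies
    // the URL against the requested class before parsing, and throws
    // InvalidProtocolBufferException on a mismatch.
    Int32Value original = Int32Value.newBuilder().setValue(42).build();
    Any packed = Any.pack(original);
    Int32Value restored = packed.unpack(Int32Value.class);  // value == 42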
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
index 64bb27892ee..f03653f62a0 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SequentialProcedure.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData;
@@ -69,15 +66,17 @@ public abstract class SequentialProcedure<TEnvironment> extends Procedure<TEnvironment> {
- public List<LockInfo> listLocks() {
+ public List<LockedResource> getLocks() {
return Collections.emptyList();
}
@Override
- public LockInfo getLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName) {
+ public LockedResource getLockResource(LockedResourceType resourceType,
+ String resourceName) {
return null;
}
}
\ No newline at end of file
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 5de50668fa7..25dfe8b1e0d 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -285,17 +283,19 @@ public abstract class StateMachineProcedure<TEnvironment, TState>
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
StateMachineProcedureData.Builder data = StateMachineProcedureData.newBuilder();
for (int i = 0; i < stateCount; ++i) {
data.addState(states[i]);
}
- data.build().writeDelimitedTo(stream);
+ serializer.serialize(data.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- StateMachineProcedureData data = StateMachineProcedureData.parseDelimitedFrom(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ StateMachineProcedureData data = serializer.deserialize(StateMachineProcedureData.class);
stateCount = data.getStateCount();
if (stateCount > 0) {
states = new int[stateCount];
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
index 5cdbc354815..99d3c282cd8 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
@@ -23,8 +23,6 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.Callable;
@@ -37,11 +35,12 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Threads;
@@ -367,11 +366,13 @@ public class ProcedureTestingUtility {
protected boolean abort(TEnv env) { return false; }
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
}
@@ -416,19 +417,23 @@ public class ProcedureTestingUtility {
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
- StreamUtils.writeRawVInt32(stream, data != null ? data.length : 0);
- if (data != null) stream.write(data);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ ByteString dataString = ByteString.copyFrom((data == null) ? new byte[0] : data);
+ BytesValue.Builder builder = BytesValue.newBuilder().setValue(dataString);
+ serializer.serialize(builder.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- int len = StreamUtils.readRawVarint32(stream);
- if (len > 0) {
- data = new byte[len];
- stream.read(data);
- } else {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ BytesValue bytesValue = serializer.deserialize(BytesValue.class);
+ ByteString dataString = bytesValue.getValue();
+
+ if (dataString.isEmpty()) {
data = null;
+ } else {
+ data = dataString.toByteArray();
}
}
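
BytesValue is the protobuf well-known wrapper chosen here so that a possibly-null byte[] still maps to exactly one state message; its round trip, sketched with arbitrary values:

    // A null payload is written as an empty ByteString and read back as null.
    ByteString payload = ByteString.copyFrom(new byte[] { 1, 2, 3 });
    BytesValue wrapped = BytesValue.newBuilder().setValue(payload).build();
    byte[] back = wrapped.getValue().isEmpty()
        ? null : wrapped.getValue().toByteArray();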
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
index b81e0f90454..ce9795f632f 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
@@ -28,10 +26,10 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -42,8 +40,6 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureEvents {
@@ -163,15 +159,23 @@ public class TestProcedureEvents {
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
- StreamUtils.writeRawVInt32(stream, ntimeouts.get());
- StreamUtils.writeRawVInt32(stream, maxTimeouts);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ Int32Value.Builder ntimeoutsBuilder = Int32Value.newBuilder().setValue(ntimeouts.get());
+ serializer.serialize(ntimeoutsBuilder.build());
+
+ Int32Value.Builder maxTimeoutsBuilder = Int32Value.newBuilder().setValue(maxTimeouts);
+ serializer.serialize(maxTimeoutsBuilder.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- ntimeouts.set(StreamUtils.readRawVarint32(stream));
- maxTimeouts = StreamUtils.readRawVarint32(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ Int32Value ntimeoutsValue = serializer.deserialize(Int32Value.class);
+ ntimeouts.set(ntimeoutsValue.getValue());
+
+ Int32Value maxTimeoutsValue = serializer.deserialize(Int32Value.class);
+ maxTimeouts = maxTimeoutsValue.getValue();
}
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
index 9681bfb1e33..f1dadb950ce 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.CountDownLatch;
@@ -31,6 +29,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -382,17 +381,19 @@ public class TestProcedureRecovery {
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
- stream.write(Bytes.toBytes(iResult));
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
+ Int32Value.Builder builder = Int32Value.newBuilder().setValue(iResult);
+ serializer.serialize(builder.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
- byte[] data = new byte[4];
- stream.read(data);
- iResult = Bytes.toInt(data);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
+ Int32Value value = serializer.deserialize(Int32Value.class);
+ iResult = value.getValue();
}
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
index bd614e38c96..80264f56545 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicLong;
@@ -29,9 +27,9 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -195,13 +193,17 @@ public class TestProcedureReplayOrder {
protected boolean abort(TestProcedureEnv env) { return true; }
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
- StreamUtils.writeLong(stream, execId);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ Int64Value.Builder builder = Int64Value.newBuilder().setValue(execId);
+ serializer.serialize(builder.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- execId = StreamUtils.readLong(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ Int64Value value = serializer.deserialize(Int64Value.class);
+ execId = value.getValue();
step = 2;
}
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
index 0146bc7ea39..f86df2db9d4 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
@@ -251,11 +249,13 @@ public class TestProcedureSuspended {
protected boolean abort(TestProcEnv env) { return false; }
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureToString.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureToString.java
index 78daf5a92b9..af25108d172 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureToString.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureToString.java
@@ -20,9 +20,6 @@ package org.apache.hadoop.hbase.procedure2;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -57,11 +54,13 @@ public class TestProcedureToString {
}
@Override
- protected void serializeStateData(OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java
index 7f98b80abce..dec58541510 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java
@@ -18,11 +18,7 @@
package org.apache.hadoop.hbase.procedure2;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.util.JsonFormat;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -34,8 +30,6 @@ import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureUtil {
- private static final Log LOG = LogFactory.getLog(TestProcedureUtil.class);
-
@Test
public void testValidation() throws Exception {
ProcedureUtil.validateClass(new TestProcedure(10));
@@ -49,34 +43,15 @@ public class TestProcedureUtil {
@Test
public void testConvert() throws Exception {
// check Procedure to protobuf conversion
- final TestProcedure proc1 = new TestProcedure(10);
+ final TestProcedure proc1 = new TestProcedure(10, 1, new byte[] { 65 });
final ProcedureProtos.Procedure proto1 = ProcedureUtil.convertToProtoProcedure(proc1);
final TestProcedure proc2 = (TestProcedure)ProcedureUtil.convertToProcedure(proto1);
final ProcedureProtos.Procedure proto2 = ProcedureUtil.convertToProtoProcedure(proc2);
assertEquals(false, proto2.hasResult());
assertEquals("Procedure protobuf does not match", proto1, proto2);
-
- // remove the state-data from the procedure protobuf to compare it to the gen ProcedureInfo
- final ProcedureProtos.Procedure pbproc = proto2.toBuilder().clearStateData().build();
-
- // check ProcedureInfo to protobuf conversion
- final ProcedureInfo protoInfo1 = ProcedureUtil.convertToProcedureInfo(proc1);
- final ProcedureProtos.Procedure proto3 = ProcedureUtil.convertToProtoProcedure(protoInfo1);
- final ProcedureInfo protoInfo2 = ProcedureUtil.convertToProcedureInfo(proto3);
- final ProcedureProtos.Procedure proto4 = ProcedureUtil.convertToProtoProcedure(protoInfo2);
- assertEquals("ProcedureInfo protobuf does not match", proto3, proto4);
- assertEquals("ProcedureInfo/Procedure protobuf does not match", pbproc, proto3);
- assertEquals("ProcedureInfo/Procedure protobuf does not match", pbproc, proto4);
}
public static class TestProcedureNoDefaultConstructor extends TestProcedure {
public TestProcedureNoDefaultConstructor(int x) {}
}
-
- public static void main(final String [] args) throws Exception {
- final TestProcedure proc1 = new TestProcedure(10);
- final ProcedureProtos.Procedure proto1 = ProcedureUtil.convertToProtoProcedure(proc1);
- JsonFormat.Printer printer = JsonFormat.printer().omittingInsignificantWhitespace();
- System.out.println(printer.print(proto1));
- }
}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
index b1d06693942..488216815c5 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
@@ -336,11 +334,13 @@ public class TestYieldProcedures {
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
}
}
@@ -353,6 +353,7 @@ public class TestYieldProcedures {
public TestScheduler() {}
+ @Override
public void addFront(final Procedure proc) {
addFrontCalls++;
super.addFront(proc);
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
index 9b8c46fc58f..44c8e127454 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
@@ -43,9 +44,9 @@ import org.apache.hadoop.hbase.procedure2.SequentialProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
@@ -514,7 +515,7 @@ public class TestWALProcedureStore {
storeRestart(loader);
assertTrue(procStore.getCorruptedLogs() != null);
assertEquals(1, procStore.getCorruptedLogs().size());
- assertEquals(85, loader.getLoadedCount());
+ assertEquals(87, loader.getLoadedCount());
assertEquals(0, loader.getCorruptedCount());
}
@@ -911,22 +912,22 @@ public class TestWALProcedureStore {
protected boolean abort(Void env) { return false; }
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
long procId = getProcId();
if (procId % 2 == 0) {
- stream.write(Bytes.toBytes(procId));
+ Int64Value.Builder builder = Int64Value.newBuilder().setValue(procId);
+ serializer.serialize(builder.build());
}
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
long procId = getProcId();
if (procId % 2 == 0) {
- byte[] bProcId = new byte[8];
- assertEquals(8, stream.read(bProcId));
- assertEquals(procId, Bytes.toLong(bProcId));
- } else {
- assertEquals(0, stream.available());
+ Int64Value value = serializer.deserialize(Int64Value.class);
+ assertEquals(procId, value.getValue());
}
}
}
diff --git a/hbase-protocol-shaded/src/main/protobuf/LockService.proto b/hbase-protocol-shaded/src/main/protobuf/LockService.proto
index 1898e687940..567dee7f47c 100644
--- a/hbase-protocol-shaded/src/main/protobuf/LockService.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/LockService.proto
@@ -71,25 +71,20 @@ message LockProcedureData {
optional bool is_master_lock = 6 [default = false];
}
-enum ResourceType {
- RESOURCE_TYPE_SERVER = 1;
- RESOURCE_TYPE_NAMESPACE = 2;
- RESOURCE_TYPE_TABLE = 3;
- RESOURCE_TYPE_REGION = 4;
+enum LockedResourceType {
+ SERVER = 1;
+ NAMESPACE = 2;
+ TABLE = 3;
+ REGION = 4;
}
-message WaitingProcedure {
- required LockType lock_type = 1;
- required Procedure procedure = 2;
-}
-
-message LockInfo {
- required ResourceType resource_type = 1;
+message LockedResource {
+ required LockedResourceType resource_type = 1;
optional string resource_name = 2;
required LockType lock_type = 3;
optional Procedure exclusive_lock_owner_procedure = 4;
optional int32 shared_lock_count = 5;
- repeated WaitingProcedure waitingProcedures = 6;
+ repeated Procedure waitingProcedures = 6;
}
service LockService {
diff --git a/hbase-protocol-shaded/src/main/protobuf/Master.proto b/hbase-protocol-shaded/src/main/protobuf/Master.proto
index 33f9bf31fff..2c1694e18d6 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Master.proto
@@ -544,18 +544,18 @@ message AbortProcedureResponse {
required bool is_procedure_aborted = 1;
}
-message ListProceduresRequest {
+message GetProceduresRequest {
}
-message ListProceduresResponse {
+message GetProceduresResponse {
repeated Procedure procedure = 1;
}
-message ListLocksRequest {
+message GetLocksRequest {
}
-message ListLocksResponse {
- repeated LockInfo lock = 1;
+message GetLocksResponse {
+ repeated LockedResource lock = 1;
}
message SetQuotaRequest {
@@ -917,11 +917,11 @@ service MasterService {
returns(AbortProcedureResponse);
/** returns a list of procedures */
- rpc ListProcedures(ListProceduresRequest)
- returns(ListProceduresResponse);
+ rpc GetProcedures(GetProceduresRequest)
+ returns(GetProceduresResponse);
- rpc ListLocks(ListLocksRequest)
- returns(ListLocksResponse);
+ rpc GetLocks(GetLocksRequest)
+ returns(GetLocksResponse);
/** Add a replication peer */
rpc AddReplicationPeer(AddReplicationPeerRequest)
diff --git a/hbase-protocol-shaded/src/main/protobuf/Procedure.proto b/hbase-protocol-shaded/src/main/protobuf/Procedure.proto
index 1a3ecf5ac12..c13a37ec55e 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Procedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Procedure.proto
@@ -23,6 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "google/protobuf/any.proto";
import "ErrorHandling.proto";
enum ProcedureState {
@@ -55,7 +56,8 @@ message Procedure {
// user state/results
optional ForeignExceptionMessage exception = 10;
optional bytes result = 11; // opaque (user) result structure
- optional bytes state_data = 12; // opaque (user) procedure internal-state
+ optional bytes state_data = 12; // opaque (user) procedure internal-state - OBSOLETE
+ repeated google.protobuf.Any state_message = 15; // opaque (user) procedure internal-state
// Nonce to prevent same procedure submit by multiple times
optional uint64 nonce_group = 13 [default = 0];
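
Because the obsolete state_data field keeps tag 12 while the repeated Any field takes the free tag 15, both encodings can coexist in one WAL; a reader branches on whichever field is populated, mirroring ProcedureUtil.convertToProcedure above:

    // Sketch of the format dispatch (field names follow the message above).
    if (proc.getStateMessageCount() > 0) {
      // post-patch: state is a list of google.protobuf.Any messages
    } else if (proc.hasStateData()) {
      // pre-patch: state is a single opaque, length-delimited byte blob
    }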
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
index e3f5be58ab2..f0cefe46962 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java
@@ -38,17 +38,16 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.HMaster;
-import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.net.Address;
-
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
@@ -253,7 +252,7 @@ public class RSGroupAdminServer implements RSGroupAdmin {
for (TableName table: tables) {
LOG.info("Unassigning region(s) from " + table + " for table move to " + targetGroupName);
LockManager.MasterLock lock = master.getLockManager().createMasterLock(table,
- LockProcedure.LockType.EXCLUSIVE, this.getClass().getName() + ": RSGroup: table move");
+ LockType.EXCLUSIVE, this.getClass().getName() + ": RSGroup: table move");
try {
try {
lock.acquire();
@@ -420,7 +419,7 @@ public class RSGroupAdminServer implements RSGroupAdmin {
}
for (TableName table: tables) {
LockManager.MasterLock lock = master.getLockManager().createMasterLock(table,
- LockProcedure.LockType.EXCLUSIVE, this.getClass().getName() + ": RSGroup: table move");
+ LockType.EXCLUSIVE, this.getClass().getName() + ": RSGroup: table move");
try {
try {
lock.acquire();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index 8e368ba8e9c..f1cf49d8cfc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaMutationAnnotation;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -43,7 +42,9 @@ import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.net.Address;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
@@ -970,38 +971,38 @@ public interface MasterObserver extends Coprocessor {
throws IOException {}
/**
- * Called before a listProcedures request has been processed.
+ * Called before a getProcedures request has been processed.
* @param ctx the environment to interact with the framework and master
*/
- default void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ default void preGetProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {}
/**
- * Called after a listProcedures request has been processed.
+ * Called after a getProcedures request has been processed.
* @param ctx the environment to interact with the framework and master
- * @param procInfoList the list of procedures about to be returned
+ * @param procList the list of procedures about to be returned
*/
- default void postListProcedures(
+ default void postGetProcedures(
ObserverContext<MasterCoprocessorEnvironment> ctx,
- List<ProcedureInfo> procInfoList) throws IOException {}
+ List<Procedure<?>> procList) throws IOException {}
/**
- * Called before a listLocks request has been processed.
+ * Called before a getLocks request has been processed.
* @param ctx the environment to interact with the framework and master
* @throws IOException if something went wrong
*/
- default void preListLocks(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ default void preGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {}
/**
- * Called after a listLocks request has been processed.
+ * Called after a getLocks request has been processed.
* @param ctx the environment to interact with the framework and master
- * @param lockInfoList the list of locks about to be returned
+ * @param lockedResources the list of locks about to be returned
* @throws IOException if something went wrong
*/
- default void postListLocks(
+ default void postGetLocks(
ObserverContext<MasterCoprocessorEnvironment> ctx,
- List<LockInfo> lockInfoList) throws IOException {}
+ List<LockedResource> lockedResources) throws IOException {}
/**
* Called prior to moving a given region from one region server to another.
@@ -1890,7 +1891,7 @@ public interface MasterObserver extends Coprocessor {
* @param ctx the environment to interact with the framework and master
*/
default void preRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
- TableName tableName, HRegionInfo[] regionInfos, LockProcedure.LockType type,
+ TableName tableName, HRegionInfo[] regionInfos, LockType type,
String description) throws IOException {}
/**
@@ -1898,7 +1899,7 @@ public interface MasterObserver extends Coprocessor {
* @param ctx the environment to interact with the framework and master
*/
default void postRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
- TableName tableName, HRegionInfo[] regionInfos, LockProcedure.LockType type,
+ TableName tableName, HRegionInfo[] regionInfos, LockType type,
String description) throws IOException {}
/**
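
Downstream coprocessors have to follow the rename. A hedged sketch of an observer on the new hook names (the class and its comments are illustrative, not part of this patch; the class is kept abstract so the non-default Coprocessor lifecycle methods can be omitted):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.MasterObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.procedure2.LockedResource;
    import org.apache.hadoop.hbase.procedure2.Procedure;

    // Illustrative observer: overrides the renamed hooks and receives live
    // Procedure<?> and LockedResource objects instead of ProcedureInfo/LockInfo.
    public abstract class AuditingMasterObserver implements MasterObserver {
      @Override
      public void postGetProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx,
          List<Procedure<?>> procList) throws IOException {
        // inspect or count the procedures about to be returned to the client
      }

      @Override
      public void postGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx,
          List<LockedResource> lockedResources) throws IOException {
        // inspect which resources are currently locked
      }
    }
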
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
index c4438bb4f31..c4c14957572 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java
@@ -29,10 +29,10 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.procedure2.LockType;
/**
* The Class ExpiredMobFileCleanerChore for running cleaner regularly to remove the expired
@@ -68,7 +68,7 @@ public class ExpiredMobFileCleanerChore extends ScheduledChore {
// clean only for mob-enabled column.
// obtain a read table lock before cleaning, synchronize with MobFileCompactionChore.
final LockManager.MasterLock lock = master.getLockManager().createMasterLock(
- MobUtils.getTableLockName(htd.getTableName()), LockProcedure.LockType.SHARED,
+ MobUtils.getTableLockName(htd.getTableName()), LockType.SHARED,
this.getClass().getSimpleName() + ": Cleaning expired mob files");
try {
lock.acquire();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index ad304aeccd9..0c79c586104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -68,7 +68,6 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.PleaseHoldException;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableDescriptors;
@@ -138,11 +137,10 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost;
import org.apache.hadoop.hbase.procedure.flush.MasterFlushTableProcedureManager;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.quotas.MasterSpaceQuotaObserver;
@@ -3051,41 +3049,35 @@ public class HMaster extends HRegionServer implements MasterServices {
}
@Override
- public List<ProcedureInfo> listProcedures() throws IOException {
+ public List<Procedure<?>> getProcedures() throws IOException {
if (cpHost != null) {
- cpHost.preListProcedures();
+ cpHost.preGetProcedures();
}
- final List<Procedure> procList = this.procedureExecutor.listProcedures();
- final List<ProcedureInfo> procInfoList = new ArrayList<>(procList.size());
-
- for (Procedure proc : procList) {
- ProcedureInfo procInfo = ProcedureUtil.convertToProcedureInfo(proc);
- procInfoList.add(procInfo);
- }
+ final List<Procedure<?>> procList = this.procedureExecutor.getProcedures();
if (cpHost != null) {
- cpHost.postListProcedures(procInfoList);
+ cpHost.postGetProcedures(procList);
}
- return procInfoList;
+ return procList;
}
@Override
- public List<LockInfo> listLocks() throws IOException {
+ public List<LockedResource> getLocks() throws IOException {
if (cpHost != null) {
- cpHost.preListLocks();
+ cpHost.preGetLocks();
}
MasterProcedureScheduler procedureScheduler = procedureExecutor.getEnvironment().getProcedureScheduler();
- final List<LockInfo> lockInfoList = procedureScheduler.listLocks();
+ final List<LockedResource> lockedResources = procedureScheduler.getLocks();
if (cpHost != null) {
- cpHost.postListLocks(lockInfoList);
+ cpHost.postGetLocks(lockedResources);
}
- return lockInfoList;
+ return lockedResources;
}
/**
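
Net effect for callers of HMaster: the ProcedureInfo/LockInfo copying layer disappears and live objects are returned directly. A sketch of consuming the new accessors (the helper class is hypothetical):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.master.MasterServices;
    import org.apache.hadoop.hbase.procedure2.LockedResource;
    import org.apache.hadoop.hbase.procedure2.Procedure;

    // Hypothetical diagnostic helper; before this patch it would have received
    // List<ProcedureInfo> and List<LockInfo> copies instead of live objects.
    public final class MasterStateDump {
      private MasterStateDump() {}

      static void dump(MasterServices master) throws IOException {
        List<Procedure<?>> procedures = master.getProcedures();
        for (Procedure<?> proc : procedures) {
          // each element is the live procedure; no ProcedureInfo conversion step
        }
        List<LockedResource> locks = master.getLocks();
        for (LockedResource locked : locks) {
          // each element describes one currently held lock
        }
      }
    }
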
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 6c43fc081c0..eaa4f5fd89d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaMutationAnnotation;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -54,7 +53,9 @@ import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.net.Address;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.security.User;
@@ -691,42 +692,42 @@ public class MasterCoprocessorHost
});
}
- public boolean preListProcedures() throws IOException {
+ public boolean preGetProcedures() throws IOException {
return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {
- oserver.preListProcedures(ctx);
+ oserver.preGetProcedures(ctx);
}
});
}
- public void postListProcedures(final List<ProcedureInfo> procInfoList) throws IOException {
+ public void postGetProcedures(final List<Procedure<?>> procInfoList) throws IOException {
execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {
- oserver.postListProcedures(ctx, procInfoList);
+ oserver.postGetProcedures(ctx, procInfoList);
}
});
}
- public boolean preListLocks() throws IOException {
+ public boolean preGetLocks() throws IOException {
return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {
- oserver.preListLocks(ctx);
+ oserver.preGetLocks(ctx);
}
});
}
- public void postListLocks(final List<LockInfo> lockInfoList) throws IOException {
+ public void postGetLocks(final List<LockedResource> lockedResources) throws IOException {
execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {
- oserver.postListLocks(ctx, lockInfoList);
+ oserver.postGetLocks(ctx, lockedResources);
}
});
}
@@ -1837,7 +1838,7 @@ public class MasterCoprocessorHost
}
public void preRequestLock(String namespace, TableName tableName, HRegionInfo[] regionInfos,
- LockProcedure.LockType type, String description) throws IOException {
+ LockType type, String description) throws IOException {
execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
@@ -1848,7 +1849,7 @@ public class MasterCoprocessorHost
}
public void postRequestLock(String namespace, TableName tableName, HRegionInfo[] regionInfos,
- LockProcedure.LockType type, String description) throws IOException {
+ LockType type, String description) throws IOException {
execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
@Override
public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
index d092efeb4d6..52b88db1b38 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java
@@ -35,8 +35,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
/**
@@ -120,7 +120,7 @@ public class MasterMobCompactionThread {
public void run() {
// These locks are on dummy table names, and only used for compaction/mob file cleaning.
final LockManager.MasterLock lock = master.getLockManager().createMasterLock(
- MobUtils.getTableLockName(tableName), LockProcedure.LockType.EXCLUSIVE,
+ MobUtils.getTableLockName(tableName), LockType.EXCLUSIVE,
this.getClass().getName() + ": mob compaction");
try {
for (ColumnFamilyDescriptor hcd : hcds) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 3ec2c45c64f..971fa3bc9d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -60,8 +59,10 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil.NonceProcedureRunnable;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.procedure.MasterProcedureManager;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.quotas.QuotaObserverChore;
import org.apache.hadoop.hbase.quotas.QuotaUtil;
@@ -82,8 +83,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegi
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.*;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
@@ -92,8 +95,111 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockH
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.*;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;
@@ -108,6 +214,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRe
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
@@ -116,10 +224,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRe
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;
@@ -1078,13 +1186,13 @@ public class MasterRpcServices extends RSRpcServices
}
@Override
- public ListProceduresResponse listProcedures(
+ public GetProceduresResponse getProcedures(
RpcController rpcController,
- ListProceduresRequest request) throws ServiceException {
+ GetProceduresRequest request) throws ServiceException {
try {
- final ListProceduresResponse.Builder response = ListProceduresResponse.newBuilder();
- for (ProcedureInfo p: master.listProcedures()) {
- response.addProcedure(ProtobufUtil.toProtoProcedure(p));
+ final GetProceduresResponse.Builder response = GetProceduresResponse.newBuilder();
+ for (Procedure<?> p: master.getProcedures()) {
+ response.addProcedure(ProcedureUtil.convertToProtoProcedure(p));
}
return response.build();
} catch (IOException e) {
@@ -1093,14 +1201,14 @@ public class MasterRpcServices extends RSRpcServices
}
@Override
- public ListLocksResponse listLocks(
+ public GetLocksResponse getLocks(
RpcController controller,
- ListLocksRequest request) throws ServiceException {
+ GetLocksRequest request) throws ServiceException {
try {
- final ListLocksResponse.Builder builder = ListLocksResponse.newBuilder();
+ final GetLocksResponse.Builder builder = GetLocksResponse.newBuilder();
- for (LockInfo lockInfo: master.listLocks()) {
- builder.addLock(ProtobufUtil.toProtoLockInfo(lockInfo));
+ for (LockedResource lockedResource: master.getLocks()) {
+ builder.addLock(ProcedureUtil.convertToProtoLockedResource(lockedResource));
}
return builder.build();
@@ -1655,28 +1763,28 @@ public class MasterRpcServices extends RSRpcServices
SecurityCapabilitiesResponse.Builder response = SecurityCapabilitiesResponse.newBuilder();
try {
master.checkInitialized();
- Set<Capability> capabilities = new HashSet<>();
+ Set<SecurityCapabilitiesResponse.Capability> capabilities = new HashSet<>();
// Authentication
if (User.isHBaseSecurityEnabled(master.getConfiguration())) {
- capabilities.add(Capability.SECURE_AUTHENTICATION);
+ capabilities.add(SecurityCapabilitiesResponse.Capability.SECURE_AUTHENTICATION);
} else {
- capabilities.add(Capability.SIMPLE_AUTHENTICATION);
+ capabilities.add(SecurityCapabilitiesResponse.Capability.SIMPLE_AUTHENTICATION);
}
// The AccessController can provide AUTHORIZATION and CELL_AUTHORIZATION
if (master.cpHost != null &&
master.cpHost.findCoprocessor(AccessController.class.getName()) != null) {
if (AccessController.isAuthorizationSupported(master.getConfiguration())) {
- capabilities.add(Capability.AUTHORIZATION);
+ capabilities.add(SecurityCapabilitiesResponse.Capability.AUTHORIZATION);
}
if (AccessController.isCellAuthorizationSupported(master.getConfiguration())) {
- capabilities.add(Capability.CELL_AUTHORIZATION);
+ capabilities.add(SecurityCapabilitiesResponse.Capability.CELL_AUTHORIZATION);
}
}
// The VisibilityController can provide CELL_VISIBILITY
if (master.cpHost != null &&
master.cpHost.findCoprocessor(VisibilityController.class.getName()) != null) {
if (VisibilityController.isCellAuthorizationSupported(master.getConfiguration())) {
- capabilities.add(Capability.CELL_VISIBILITY);
+ capabilities.add(SecurityCapabilitiesResponse.Capability.CELL_VISIBILITY);
}
}
response.addAllCapabilities(capabilities);
@@ -1846,7 +1954,7 @@ public class MasterRpcServices extends RSRpcServices
throw new IllegalArgumentException("Empty description");
}
NonceProcedureRunnable npr;
- LockProcedure.LockType type = LockProcedure.LockType.valueOf(request.getLockType().name());
+ LockType type = LockType.valueOf(request.getLockType().name());
if (request.getRegionInfoCount() > 0) {
final HRegionInfo[] regionInfos = new HRegionInfo[request.getRegionInfoCount()];
for (int i = 0; i < request.getRegionInfoCount(); ++i) {
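
One detail worth noting in requestLock() above: the protobuf LockType and the new Java LockType keep identical constant names (SHARED, EXCLUSIVE), so conversion in both directions is a plain valueOf() round trip rather than a switch. Condensed from the hunks above and from the LockProcedure hunk further down:

    // Both enums share the constant names SHARED/EXCLUSIVE by design.
    LockType javaType = LockType.valueOf(request.getLockType().name());
    LockServiceProtos.LockType protoType =
        LockServiceProtos.LockType.valueOf(javaType.name());

This only stays safe as long as the .proto definition and the Java enum remain name-aligned.
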
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index cde9e34064f..6d5c53f3924 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -22,7 +22,6 @@ import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableDescriptors;
@@ -41,7 +40,8 @@ import org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
@@ -362,18 +362,18 @@ public interface MasterServices extends Server {
throws IOException;
/**
- * List procedures
+ * Get procedures
* @return procedure list
* @throws IOException
*/
- public List<ProcedureInfo> listProcedures() throws IOException;
+ public List<Procedure<?>> getProcedures() throws IOException;
/**
- * List locks
+ * Get locks
* @return lock list
* @throws IOException
*/
- public List<LockInfo> listLocks() throws IOException;
+ public List<LockedResource> getLocks() throws IOException;
/**
* Get list of table descriptors by namespace
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
index 476c65ce2b8..2e4ff9afbdb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.procedure2.LockType;
/**
* The Class MobCompactChore for running compaction regularly to merge small mob files.
@@ -64,7 +64,7 @@ public class MobCompactionChore extends ScheduledChore {
boolean reported = false;
try {
final LockManager.MasterLock lock = master.getLockManager().createMasterLock(
- MobUtils.getTableLockName(htd.getTableName()), LockProcedure.LockType.EXCLUSIVE,
+ MobUtils.getTableLockName(htd.getTableName()), LockType.EXCLUSIVE,
this.getClass().getName() + ": mob compaction");
for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
if (!hcd.isMobEnabled()) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index d78ba74d7d0..abc9796f5d7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -20,9 +20,6 @@
package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -38,6 +35,7 @@ import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.RegionOpenOperation;
import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -121,7 +119,8 @@ public class AssignProcedure extends RegionTransitionProcedure {
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
final AssignRegionStateData.Builder state = AssignRegionStateData.newBuilder()
.setTransitionState(getTransitionState())
.setRegionInfo(HRegionInfo.convert(getRegionInfo()));
@@ -131,12 +130,13 @@ public class AssignProcedure extends RegionTransitionProcedure {
if (this.targetServer != null) {
state.setTargetServer(ProtobufUtil.toServerName(this.targetServer));
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- final AssignRegionStateData state = AssignRegionStateData.parseDelimitedFrom(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ final AssignRegionStateData state = serializer.deserialize(AssignRegionStateData.class);
setTransitionState(state.getTransitionState());
setRegionInfo(HRegionInfo.convert(state.getRegionInfo()));
forceNewPlan = state.getForceNewPlan();
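
The serializeStateData/deserializeStateData change above repeats across every procedure in this patch: the raw stream plus writeDelimitedTo/parseDelimitedFrom pair is replaced by a ProcedureStateSerializer. A minimal sketch of the new contract, as it would appear inside a hypothetical Procedure subclass whose state lives in an assumed MyStateData protobuf message (both names are placeholders, not part of the patch):

    @Override
    protected void serializeStateData(ProcedureStateSerializer serializer)
        throws IOException {
      MyStateData.Builder state = MyStateData.newBuilder();
      // ... populate the builder from in-memory fields ...
      serializer.serialize(state.build());  // was: state.build().writeDelimitedTo(stream)
    }

    @Override
    protected void deserializeStateData(ProcedureStateSerializer serializer)
        throws IOException {
      MyStateData state = serializer.deserialize(MyStateData.class);
      // ... restore in-memory fields ...   // was: MyStateData.parseDelimitedFrom(stream)
    }
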
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
index c7d97ee305d..27f6707c834 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java
@@ -18,9 +18,6 @@
package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -29,6 +26,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineTableProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -132,21 +130,23 @@ extends AbstractStateMachineTableProcedure<GCMergedRegionsState> {
}
@Override
- protected void serializeStateData(OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
final MasterProcedureProtos.GCMergedRegionsStateData.Builder msg =
MasterProcedureProtos.GCMergedRegionsStateData.newBuilder().
setParentA(HRegionInfo.convert(this.father)).
setParentB(HRegionInfo.convert(this.mother)).
setMergedChild(HRegionInfo.convert(this.mergedChild));
- msg.build().writeDelimitedTo(stream);
+ serializer.serialize(msg.build());
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
final MasterProcedureProtos.GCMergedRegionsStateData msg =
- MasterProcedureProtos.GCMergedRegionsStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.GCMergedRegionsStateData.class);
this.father = HRegionInfo.convert(msg.getParentA());
this.mother = HRegionInfo.convert(msg.getParentB());
this.mergedChild = HRegionInfo.convert(msg.getMergedChild());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
index 29d06761854..2d7f239a170 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java
@@ -18,9 +18,6 @@
package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
@@ -32,6 +29,7 @@ import org.apache.hadoop.hbase.favored.FavoredNodesManager;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -131,20 +129,22 @@ public class GCRegionProcedure extends AbstractStateMachineRegionProcedure<GCRegionState> {
public Pair<Path, Path> call() throws IOException {
return splitStoreFile(regionFs, family, sf);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
index c9f0fac4786..7d875b6acda 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java
@@ -20,11 +20,6 @@
package org.apache.hadoop.hbase.master.assignment;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.ConnectException;
-import java.util.concurrent.atomic.AtomicBoolean;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
@@ -40,6 +35,7 @@ import org.apache.hadoop.hbase.master.procedure.ServerCrashException;
import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher.RegionCloseOperation;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
@@ -48,7 +44,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
-
/**
* Procedure that describes the unassignment of a single region.
* There can only be one RegionTransitionProcedure -- i.e. an assign or an unassign -- per region
@@ -128,7 +123,8 @@ public class UnassignProcedure extends RegionTransitionProcedure {
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
UnassignRegionStateData.Builder state = UnassignRegionStateData.newBuilder()
.setTransitionState(getTransitionState())
.setHostingServer(ProtobufUtil.toServerName(this.hostingServer))
@@ -139,12 +135,14 @@ public class UnassignProcedure extends RegionTransitionProcedure {
if (force) {
state.setForce(true);
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- final UnassignRegionStateData state = UnassignRegionStateData.parseDelimitedFrom(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ final UnassignRegionStateData state =
+ serializer.deserialize(UnassignRegionStateData.class);
setTransitionState(state.getTransitionState());
setRegionInfo(HRegionInfo.convert(state.getRegionInfo()));
this.hostingServer = ProtobufUtil.toServerName(state.getHostingServer());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
index 6c8bbbac3be..87ad557dcc9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java
@@ -18,19 +18,20 @@
*/
package org.apache.hadoop.hbase.master.locking;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.util.NonceKey;
-import java.io.IOException;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-
/**
* Functions to acquire lock on table/namespace/regions.
*/
@@ -50,12 +51,12 @@ public final class LockManager {
}
public MasterLock createMasterLock(final String namespace,
- final LockProcedure.LockType type, final String description) {
+ final LockType type, final String description) {
return new MasterLock(namespace, type, description);
}
public MasterLock createMasterLock(final TableName tableName,
- final LockProcedure.LockType type, final String description) {
+ final LockType type, final String description) {
return new MasterLock(tableName, type, description);
}
@@ -81,13 +82,13 @@ public final class LockManager {
private final String namespace;
private final TableName tableName;
private final HRegionInfo[] regionInfos;
- private final LockProcedure.LockType type;
+ private final LockType type;
private final String description;
private LockProcedure proc = null;
public MasterLock(final String namespace,
- final LockProcedure.LockType type, final String description) {
+ final LockType type, final String description) {
this.namespace = namespace;
this.tableName = null;
this.regionInfos = null;
@@ -96,7 +97,7 @@ public final class LockManager {
}
public MasterLock(final TableName tableName,
- final LockProcedure.LockType type, final String description) {
+ final LockType type, final String description) {
this.namespace = null;
this.tableName = tableName;
this.regionInfos = null;
@@ -108,7 +109,7 @@ public final class LockManager {
this.namespace = null;
this.tableName = null;
this.regionInfos = regionInfos;
- this.type = LockProcedure.LockType.EXCLUSIVE;
+ this.type = LockType.EXCLUSIVE;
this.description = description;
}
@@ -203,7 +204,7 @@ public final class LockManager {
* locks, regular heartbeats are required to keep the lock held.
*/
public class RemoteLocks {
- public long requestNamespaceLock(final String namespace, final LockProcedure.LockType type,
+ public long requestNamespaceLock(final String namespace, final LockType type,
final String description, final NonceKey nonceKey)
throws IllegalArgumentException, IOException {
master.getMasterCoprocessorHost().preRequestLock(namespace, null, null, type, description);
@@ -214,7 +215,7 @@ public final class LockManager {
return proc.getProcId();
}
- public long requestTableLock(final TableName tableName, final LockProcedure.LockType type,
+ public long requestTableLock(final TableName tableName, final LockType type,
final String description, final NonceKey nonceKey)
throws IllegalArgumentException, IOException {
master.getMasterCoprocessorHost().preRequestLock(null, tableName, null, type, description);
@@ -232,12 +233,12 @@ public final class LockManager {
final NonceKey nonceKey)
throws IllegalArgumentException, IOException {
master.getMasterCoprocessorHost().preRequestLock(null, null, regionInfos,
- LockProcedure.LockType.EXCLUSIVE, description);
+ LockType.EXCLUSIVE, description);
final LockProcedure proc = new LockProcedure(master.getConfiguration(), regionInfos,
- LockProcedure.LockType.EXCLUSIVE, description, null);
+ LockType.EXCLUSIVE, description, null);
submitProcedure(proc, nonceKey);
master.getMasterCoprocessorHost().postRequestLock(null, null, regionInfos,
- LockProcedure.LockType.EXCLUSIVE, description);
+ LockType.EXCLUSIVE, description);
return proc.getProcId();
}
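
The RemoteLocks API keeps the same shape, again with the top-level LockType. A hedged usage sketch (the helper class and description string are illustrative; passing a null NonceKey is an assumption made purely to keep the sketch short):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.master.locking.LockManager;
    import org.apache.hadoop.hbase.procedure2.LockType;

    // Hypothetical client of the remote-lock API after the rename.
    public final class RemoteLockExample {
      private RemoteLockExample() {}

      static long requestSharedTableLock(LockManager.RemoteLocks locks, TableName table)
          throws IOException {
        return locks.requestTableLock(table, LockType.SHARED,
            "illustrative shared read lock", null);
      }
    }
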
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
index edbba833361..e7b416890b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
@@ -27,8 +27,10 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
@@ -36,8 +38,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockP
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
@@ -66,9 +66,6 @@ public final class LockProcedure extends Procedure<MasterProcedureEnv>
public static final String LOCAL_MASTER_LOCKS_TIMEOUT_MS_CONF =
"hbase.master.procedure.local.master.locks.timeout.ms";
- // Also used in serialized states, changes will affect backward compatibility.
- public enum LockType { SHARED, EXCLUSIVE }
-
private String namespace;
private TableName tableName;
private HRegionInfo[] regionInfos;
@@ -265,7 +262,8 @@ public final class LockProcedure extends Procedure
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
final LockProcedureData.Builder builder = LockProcedureData.newBuilder()
.setLockType(LockServiceProtos.LockType.valueOf(type.name()))
.setDescription(description);
@@ -281,12 +279,13 @@ public final class LockProcedure extends Procedure
if (lockAcquireLatch != null) {
builder.setIsMasterLock(true);
}
- builder.build().writeDelimitedTo(stream);
+ serializer.serialize(builder.build());
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- final LockProcedureData state = LockProcedureData.parseDelimitedFrom(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ final LockProcedureData state = serializer.deserialize(LockProcedureData.class);
type = LockType.valueOf(state.getLockType().name());
description = state.getDescription();
if (state.getRegionInfoCount() > 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.java
index 41502d45ffd..c254cc44459 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
@@ -84,6 +82,7 @@ public abstract class AbstractStateMachineRegionProcedure<TState>
* @param env MasterProcedureEnv
* @throws IOException
*/
+ @Override
protected void checkTableModifiable(final MasterProcedureEnv env) throws IOException {
// Checks whether the table exists
if (!MetaTableAccessor.tableExists(env.getMasterServices().getConnection(), getTableName())) {
@@ -96,6 +95,7 @@ public abstract class AbstractStateMachineRegionProcedure<TState>
return true;
}
+ @Override
protected LockState acquireLock(final MasterProcedureEnv env) {
if (env.waitInitialized(this)) return LockState.LOCK_EVENT_WAIT;
if (env.getProcedureScheduler().waitRegions(this, getTableName(), getRegion())) {
@@ -105,6 +105,7 @@ public abstract class AbstractStateMachineRegionProcedure<TState>
return LockState.LOCK_ACQUIRED;
}
+ @Override
protected void releaseLock(final MasterProcedureEnv env) {
this.lock = false;
env.getProcedureScheduler().wakeRegions(this, getTableName(), getRegion());
@@ -120,14 +121,16 @@ public abstract class AbstractStateMachineRegionProcedure<TState>
}
@Override
- protected void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
- HRegionInfo.convert(getRegion()).writeDelimitedTo(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
+ serializer.serialize(HRegionInfo.convert(getRegion()));
}
@Override
- protected void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
- this.hri = HRegionInfo.convert(HBaseProtos.RegionInfo.parseDelimitedFrom(stream));
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
+ this.hri = HRegionInfo.convert(serializer.deserialize(HBaseProtos.RegionInfo.class));
}
}
\ No newline at end of file
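
Note how this base class, like GCMergedRegionsProcedure above and AddColumnFamilyProcedure below, calls super before touching its own state: under ProcedureStateSerializer each level of the hierarchy reads and writes exactly one message, in the same top-down order on both paths. Schematically, for a hypothetical subclass (buildOwnStateMessage() is a placeholder):

    @Override
    protected void serializeStateData(ProcedureStateSerializer serializer)
        throws IOException {
      super.serializeStateData(serializer);        // ancestors write their messages first
      serializer.serialize(buildOwnStateMessage()); // then this level appends its own
      // deserializeStateData mirrors this: super first, then one deserialize() call
    }
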
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
index f19195eecea..84fc7e53a09 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -33,6 +31,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
@@ -166,8 +165,9 @@ public class AddColumnFamilyProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.AddColumnFamilyStateData.Builder addCFMsg =
MasterProcedureProtos.AddColumnFamilyStateData.newBuilder()
@@ -179,15 +179,16 @@ public class AddColumnFamilyProcedure
.setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
}
- addCFMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(addCFMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.AddColumnFamilyStateData addCFMsg =
- MasterProcedureProtos.AddColumnFamilyStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.AddColumnFamilyStateData.class);
setUser(MasterProcedureUtil.toUserInfo(addCFMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(addCFMsg.getTableName());
cfDescriptor = ProtobufUtil.toColumnFamilyDescriptor(addCFMsg.getColumnfamilySchema());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index cc39f5310d1..7e21ad6f149 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -47,6 +45,7 @@ import org.apache.hadoop.hbase.master.MetricsSnapshot;
import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.CreateHdfsRegions;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -244,8 +243,9 @@ public class CloneSnapshotProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.CloneSnapshotStateData.Builder cloneSnapshotMsg =
MasterProcedureProtos.CloneSnapshotStateData.newBuilder()
@@ -271,15 +271,16 @@ public class CloneSnapshotProcedure
cloneSnapshotMsg.addParentToChildRegionsPairList(parentToChildrenPair);
}
}
- cloneSnapshotMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(cloneSnapshotMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.CloneSnapshotStateData cloneSnapshotMsg =
- MasterProcedureProtos.CloneSnapshotStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.CloneSnapshotStateData.class);
setUser(MasterProcedureUtil.toUserInfo(cloneSnapshotMsg.getUserInfo()));
snapshot = cloneSnapshotMsg.getSnapshot();
tableDescriptor = ProtobufUtil.toTableDescriptor(cloneSnapshotMsg.getTableSchema());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
index 7d651265c53..2cbf8f2ee34 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -29,6 +26,7 @@ import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceState;
@@ -135,21 +133,23 @@ public class CreateNamespaceProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.CreateNamespaceStateData.Builder createNamespaceMsg =
MasterProcedureProtos.CreateNamespaceStateData.newBuilder().setNamespaceDescriptor(
ProtobufUtil.toProtoNamespaceDescriptor(this.nsDescriptor));
- createNamespaceMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(createNamespaceMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.CreateNamespaceStateData createNamespaceMsg =
- MasterProcedureProtos.CreateNamespaceStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.CreateNamespaceStateData.class);
nsDescriptor = ProtobufUtil.toNamespaceDescriptor(createNamespaceMsg.getNamespaceDescriptor());
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index 14604fdac0e..a5c15842d43 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.MasterFileSystem;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -183,8 +182,9 @@ public class CreateTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.CreateTableStateData.Builder state =
MasterProcedureProtos.CreateTableStateData.newBuilder()
@@ -195,15 +195,16 @@ public class CreateTableProcedure
state.addRegionInfo(HRegionInfo.convert(hri));
}
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.CreateTableStateData state =
- MasterProcedureProtos.CreateTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.CreateTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
tableDescriptor = ProtobufUtil.toTableDescriptor(state.getTableSchema());
if (state.getRegionInfoCount() == 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
index 9ec814ab942..12726edc9f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -32,6 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -172,8 +171,9 @@ public class DeleteColumnFamilyProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.DeleteColumnFamilyStateData.Builder deleteCFMsg =
MasterProcedureProtos.DeleteColumnFamilyStateData.newBuilder()
@@ -185,14 +185,15 @@ public class DeleteColumnFamilyProcedure
.setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
}
- deleteCFMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(deleteCFMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.DeleteColumnFamilyStateData deleteCFMsg =
- MasterProcedureProtos.DeleteColumnFamilyStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.DeleteColumnFamilyStateData.class);
setUser(MasterProcedureUtil.toUserInfo(deleteCFMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(deleteCFMsg.getTableName());
familyName = deleteCFMsg.getColumnfamilyName().toByteArray();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
index d91a6e14f69..a66e77c1fec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
@@ -20,9 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.FileNotFoundException;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
@@ -35,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceState;
@@ -147,8 +145,9 @@ public class DeleteNamespaceProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.DeleteNamespaceStateData.Builder deleteNamespaceMsg =
MasterProcedureProtos.DeleteNamespaceStateData.newBuilder().setNamespaceName(namespaceName);
@@ -156,15 +155,16 @@ public class DeleteNamespaceProcedure
deleteNamespaceMsg.setNamespaceDescriptor(
ProtobufUtil.toProtoNamespaceDescriptor(this.nsDescriptor));
}
- deleteNamespaceMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(deleteNamespaceMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.DeleteNamespaceStateData deleteNamespaceMsg =
- MasterProcedureProtos.DeleteNamespaceStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.DeleteNamespaceStateData.class);
namespaceName = deleteNamespaceMsg.getNamespaceName();
if (deleteNamespaceMsg.hasNamespaceDescriptor()) {
nsDescriptor =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
index 04dfc60a465..0f665f2ddec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
@@ -48,6 +46,7 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -203,8 +202,9 @@ public class DeleteTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.DeleteTableStateData.Builder state =
MasterProcedureProtos.DeleteTableStateData.newBuilder()
@@ -215,15 +215,16 @@ public class DeleteTableProcedure
state.addRegionInfo(HRegionInfo.convert(hri));
}
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.DeleteTableStateData state =
- MasterProcedureProtos.DeleteTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.DeleteTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
tableName = ProtobufUtil.toTableName(state.getTableName());
if (state.getRegionInfoCount() == 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
index 0872a0151b9..20eca94525c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -33,6 +30,7 @@ import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState;
@@ -171,8 +169,9 @@ public class DisableTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.DisableTableStateData.Builder disableTableMsg =
MasterProcedureProtos.DisableTableStateData.newBuilder()
@@ -180,15 +179,16 @@ public class DisableTableProcedure
.setTableName(ProtobufUtil.toProtoTableName(tableName))
.setSkipTableStateCheck(skipTableStateCheck);
- disableTableMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(disableTableMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.DisableTableStateData disableTableMsg =
- MasterProcedureProtos.DisableTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.DisableTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(disableTableMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(disableTableMsg.getTableName());
skipTableStateCheck = disableTableMsg.getSkipTableStateCheck();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
index 4f4b5b1906b..ff43d27e104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -32,6 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableState;
@@ -170,8 +168,9 @@ public class EnableTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.EnableTableStateData.Builder enableTableMsg =
MasterProcedureProtos.EnableTableStateData.newBuilder()
@@ -179,15 +178,16 @@ public class EnableTableProcedure
.setTableName(ProtobufUtil.toProtoTableName(tableName))
.setSkipTableStateCheck(skipTableStateCheck);
- enableTableMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(enableTableMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.EnableTableStateData enableTableMsg =
- MasterProcedureProtos.EnableTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.EnableTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(enableTableMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(enableTableMsg.getTableName());
skipTableStateCheck = enableTableMsg.getSkipTableStateCheck();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 564b86d6bf5..e14c037035b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.master.procedure;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -32,7 +30,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -42,11 +39,13 @@ import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType;
import org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler;
import org.apache.hadoop.hbase.procedure2.LockAndQueue;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
import org.apache.hadoop.hbase.procedure2.LockStatus;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.LockedResourceType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureDeque;
-import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList;
import org.apache.hadoop.hbase.util.AvlUtil.AvlKeyComparator;
import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;
@@ -238,57 +237,42 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
return pollResult;
}
- private LockInfo createLockInfo(LockInfo.ResourceType resourceType,
+ private LockedResource createLockedResource(LockedResourceType resourceType,
String resourceName, LockAndQueue queue) {
- LockInfo info = new LockInfo();
-
- info.setResourceType(resourceType);
- info.setResourceName(resourceName);
+ LockType lockType;
+ Procedure<?> exclusiveLockOwnerProcedure;
+ int sharedLockCount;
if (queue.hasExclusiveLock()) {
- info.setLockType(LockInfo.LockType.EXCLUSIVE);
-
- Procedure<?> exclusiveLockOwnerProcedure = queue.getExclusiveLockOwnerProcedure();
- ProcedureInfo exclusiveLockOwnerProcedureInfo =
- ProcedureUtil.convertToProcedureInfo(exclusiveLockOwnerProcedure);
- info.setExclusiveLockOwnerProcedure(exclusiveLockOwnerProcedureInfo);
- } else if (queue.getSharedLockCount() > 0) {
- info.setLockType(LockInfo.LockType.SHARED);
- info.setSharedLockCount(queue.getSharedLockCount());
+ lockType = LockType.EXCLUSIVE;
+ exclusiveLockOwnerProcedure = queue.getExclusiveLockOwnerProcedure();
+ sharedLockCount = 0;
+ } else {
+ lockType = LockType.SHARED;
+ exclusiveLockOwnerProcedure = null;
+ sharedLockCount = queue.getSharedLockCount();
}
+ List<Procedure<?>> waitingProcedures = new ArrayList<>();
+
for (Procedure<?> procedure : queue) {
if (!(procedure instanceof LockProcedure)) {
continue;
}
- LockProcedure lockProcedure = (LockProcedure)procedure;
- LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
-
- switch (lockProcedure.getType()) {
- case EXCLUSIVE:
- waitingProcedure.setLockType(LockInfo.LockType.EXCLUSIVE);
- break;
- case SHARED:
- waitingProcedure.setLockType(LockInfo.LockType.SHARED);
- break;
- }
-
- ProcedureInfo procedureInfo = ProcedureUtil.convertToProcedureInfo(lockProcedure);
- waitingProcedure.setProcedure(procedureInfo);
-
- info.addWaitingProcedure(waitingProcedure);
+ waitingProcedures.add(procedure);
}
- return info;
+ return new LockedResource(resourceType, resourceName, lockType,
+ exclusiveLockOwnerProcedure, sharedLockCount, waitingProcedures);
}
@Override
- public List<LockInfo> listLocks() {
+ public List<LockedResource> getLocks() {
schedLock();
try {
- List<LockInfo> lockInfos = new ArrayList<>();
+ List<LockedResource> lockedResources = new ArrayList<>();
for (Entry<ServerName, LockAndQueue> entry : locking.serverLocks
.entrySet()) {
@@ -296,9 +280,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
LockAndQueue queue = entry.getValue();
if (queue.isLocked()) {
- LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.SERVER,
- serverName, queue);
- lockInfos.add(lockInfo);
+ LockedResource lockedResource =
+ createLockedResource(LockedResourceType.SERVER, serverName, queue);
+ lockedResources.add(lockedResource);
}
}
@@ -308,9 +292,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
LockAndQueue queue = entry.getValue();
if (queue.isLocked()) {
- LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.NAMESPACE,
- namespaceName, queue);
- lockInfos.add(lockInfo);
+ LockedResource lockedResource =
+ createLockedResource(LockedResourceType.NAMESPACE, namespaceName, queue);
+ lockedResources.add(lockedResource);
}
}
@@ -320,9 +304,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
LockAndQueue queue = entry.getValue();
if (queue.isLocked()) {
- LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.TABLE,
- tableName, queue);
- lockInfos.add(lockInfo);
+ LockedResource lockedResource =
+ createLockedResource(LockedResourceType.TABLE, tableName, queue);
+ lockedResources.add(lockedResource);
}
}
@@ -331,20 +315,21 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
LockAndQueue queue = entry.getValue();
if (queue.isLocked()) {
- LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.REGION,
- regionName, queue);
- lockInfos.add(lockInfo);
+ LockedResource lockedResource =
+ createLockedResource(LockedResourceType.REGION, regionName, queue);
+ lockedResources.add(lockedResource);
}
}
- return lockInfos;
+ return lockedResources;
} finally {
schedUnlock();
}
}
@Override
- public LockInfo getLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName) {
+ public LockedResource getLockResource(LockedResourceType resourceType,
+ String resourceName) {
LockAndQueue queue = null;
schedLock();
try {
@@ -363,7 +348,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
break;
}
- return queue != null ? createLockInfo(resourceType, resourceName, queue) : null;
+ return queue != null ? createLockedResource(resourceType, resourceName, queue) : null;
} finally {
schedUnlock();
}
@@ -624,17 +609,17 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
/**
* Get lock info for a resource of specified type and name and log details
*/
- protected void logLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName) {
+ protected void logLockedResource(LockedResourceType resourceType, String resourceName) {
if (!LOG.isDebugEnabled()) {
return;
}
- LockInfo lockInfo = getLockInfoForResource(resourceType, resourceName);
- if (lockInfo != null) {
+ LockedResource lockedResource = getLockResource(resourceType, resourceName);
+ if (lockedResource != null) {
String msg = resourceType.toString() + " '" + resourceName + "', shared lock count=" +
- lockInfo.getSharedLockCount();
+ lockedResource.getSharedLockCount();
- ProcedureInfo proc = lockInfo.getExclusiveLockOwnerProcedure();
+ Procedure<?> proc = lockedResource.getExclusiveLockOwnerProcedure();
if (proc != null) {
msg += ", exclusively locked by procId=" + proc.getProcId();
}
@@ -657,13 +642,13 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
final LockAndQueue tableLock = locking.getTableLock(table);
if (!namespaceLock.trySharedLock()) {
waitProcedure(namespaceLock, procedure);
- logLockInfoForResource(LockInfo.ResourceType.NAMESPACE, namespace);
+ logLockedResource(LockedResourceType.NAMESPACE, namespace);
return true;
}
if (!tableLock.tryExclusiveLock(procedure)) {
namespaceLock.releaseSharedLock();
waitProcedure(tableLock, procedure);
- logLockInfoForResource(LockInfo.ResourceType.TABLE, table.getNameAsString());
+ logLockedResource(LockedResourceType.TABLE, table.getNameAsString());
return true;
}
removeFromRunQueue(tableRunQueue, getTableQueue(table));
@@ -920,7 +905,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
locking.getTableLock(TableName.NAMESPACE_TABLE_NAME);
if (!systemNamespaceTableLock.trySharedLock()) {
waitProcedure(systemNamespaceTableLock, procedure);
- logLockInfoForResource(LockInfo.ResourceType.TABLE,
+ logLockedResource(LockedResourceType.TABLE,
TableName.NAMESPACE_TABLE_NAME.getNameAsString());
return true;
}
@@ -929,7 +914,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
if (!namespaceLock.tryExclusiveLock(procedure)) {
systemNamespaceTableLock.releaseSharedLock();
waitProcedure(namespaceLock, procedure);
- logLockInfoForResource(LockInfo.ResourceType.NAMESPACE, namespace);
+ logLockedResource(LockedResourceType.NAMESPACE, namespace);
return true;
}
return false;
@@ -982,7 +967,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
return false;
}
waitProcedure(lock, procedure);
- logLockInfoForResource(LockInfo.ResourceType.SERVER, serverName.getServerName());
+ logLockedResource(LockedResourceType.SERVER, serverName.getServerName());
return true;
} finally {
schedUnlock();
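The scheduler now hands back plain LockedResource values instead of pre-rendered LockInfo objects, so callers format the lock state themselves. A hedged sketch of such a caller, reusing the message shape of logLockedResource() above: getSharedLockCount() and getExclusiveLockOwnerProcedure() appear in this patch, while getResourceType() and getResourceName() are assumed accessors matching the constructor arguments.

    import java.util.List;

    import org.apache.hadoop.hbase.procedure2.LockedResource;
    import org.apache.hadoop.hbase.procedure2.Procedure;

    public final class LockReport {
      private LockReport() {
      }

      // One line per locked resource, same shape as logLockedResource() above.
      public static String render(List<LockedResource> lockedResources) {
        StringBuilder builder = new StringBuilder();
        for (LockedResource resource : lockedResources) {
          builder.append(resource.getResourceType()).append(" '")
              .append(resource.getResourceName()).append("', shared lock count=")
              .append(resource.getSharedLockCount());
          // Present only when the resource is exclusively locked.
          Procedure<?> owner = resource.getExclusiveLockOwnerProcedure();
          if (owner != null) {
            builder.append(", exclusively locked by procId=").append(owner.getProcId());
          }
          builder.append('\n');
        }
        return builder.toString();
      }
    }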
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
index ac86dabbef1..8fc9d289fcd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
@@ -31,6 +28,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
@@ -159,8 +157,9 @@ public class ModifyColumnFamilyProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.ModifyColumnFamilyStateData.Builder modifyCFMsg =
MasterProcedureProtos.ModifyColumnFamilyStateData.newBuilder()
@@ -172,15 +171,16 @@ public class ModifyColumnFamilyProcedure
.setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedtableDescriptor));
}
- modifyCFMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(modifyCFMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.ModifyColumnFamilyStateData modifyCFMsg =
- MasterProcedureProtos.ModifyColumnFamilyStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.ModifyColumnFamilyStateData.class);
setUser(MasterProcedureUtil.toUserInfo(modifyCFMsg.getUserInfo()));
tableName = ProtobufUtil.toTableName(modifyCFMsg.getTableName());
cfDescriptor = ProtobufUtil.toColumnFamilyDescriptor(modifyCFMsg.getColumnfamilySchema());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
index 17e7197bcc1..99c18fb33a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
@@ -19,15 +19,13 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceState;
@@ -130,8 +128,9 @@ public class ModifyNamespaceProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.ModifyNamespaceStateData.Builder modifyNamespaceMsg =
MasterProcedureProtos.ModifyNamespaceStateData.newBuilder().setNamespaceDescriptor(
@@ -140,15 +139,16 @@ public class ModifyNamespaceProcedure
modifyNamespaceMsg.setUnmodifiedNamespaceDescriptor(
ProtobufUtil.toProtoNamespaceDescriptor(this.oldNsDescriptor));
}
- modifyNamespaceMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(modifyNamespaceMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.ModifyNamespaceStateData modifyNamespaceMsg =
- MasterProcedureProtos.ModifyNamespaceStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.ModifyNamespaceStateData.class);
newNsDescriptor =
ProtobufUtil.toNamespaceDescriptor(modifyNamespaceMsg.getNamespaceDescriptor());
if (modifyNamespaceMsg.hasUnmodifiedNamespaceDescriptor()) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index 9741faa2673..0fc08c6e1f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -42,6 +40,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState;
@@ -185,8 +184,9 @@ public class ModifyTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.ModifyTableStateData.Builder modifyTableMsg =
MasterProcedureProtos.ModifyTableStateData.newBuilder()
@@ -199,15 +199,16 @@ public class ModifyTableProcedure
.setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
}
- modifyTableMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(modifyTableMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.ModifyTableStateData modifyTableMsg =
- MasterProcedureProtos.ModifyTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.ModifyTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(modifyTableMsg.getUserInfo()));
modifiedTableDescriptor = ProtobufUtil.toTableDescriptor(modifyTableMsg.getModifiedTableSchema());
deleteColumnFamilyInModify = modifyTableMsg.getDeleteColumnFamilyInModify();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
new file mode 100644
index 00000000000..080d6ffe583
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.util.JRubyFormat;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class ProcedureDescriber {
+ private ProcedureDescriber() {
+ }
+
+ private static Object parametersToObject(Procedure<?> proc) {
+ try {
+ ProcedureProtos.Procedure protoProc = ProcedureUtil.convertToProtoProcedure(proc);
+ List<Object> parameters = protoProc.getStateMessageList().stream()
+ .map((any) -> {
+ try {
+ return ProtobufMessageConverter.toJavaObject(any);
+ } catch (InvalidProtocolBufferException e) {
+ return e.toString();
+ }
+ }).collect(Collectors.toList());
+ return parameters;
+ } catch (IOException e) {
+ return e.toString();
+ }
+ }
+
+ public static String describe(Procedure<?> proc) {
+ Map<String, Object> description = new LinkedHashMap<>();
+
+ description.put("ID", proc.getProcId());
+ description.put("PARENT_ID", proc.getParentProcId());
+ description.put("STATE", proc.getState());
+ description.put("OWNER", proc.getOwner());
+ description.put("TYPE", proc.getProcName());
+ description.put("START_TIME", new Date(proc.getSubmittedTime()));
+ description.put("LAST_UPDATE", new Date(proc.getLastUpdate()));
+
+ if (proc.isFailed()) {
+ description.put("ERRORS", proc.getException().unwrapRemoteIOException().getMessage());
+ }
+ description.put("PARAMETERS", parametersToObject(proc));
+
+ return JRubyFormat.print(description);
+ }
+
+ public static String describeParameters(Procedure<?> proc) {
+ Object object = parametersToObject(proc);
+ return JRubyFormat.print(object);
+ }
+}
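ProcedureDescriber renders its description map through JRubyFormat.print(), which emits a Ruby-style hash literal suitable for shell output. A small standalone usage sketch; the keys and values here are made up for illustration, and the expected output follows from the JRubyFormat rules (unquoted keys when escaping is a no-op, single-quoted values):

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.hadoop.hbase.util.JRubyFormat;

    public final class DescribeDemo {
      public static void main(String[] args) {
        Map<String, Object> description = new LinkedHashMap<>();
        description.put("ID", 17L);
        description.put("STATE", "RUNNABLE");
        description.put("TYPE", "AssignProcedure");

        // Prints: { ID => '17', STATE => 'RUNNABLE', TYPE => 'AssignProcedure' }
        System.out.println(JRubyFormat.print(description));
      }
    }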
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
index 72f0648bd1e..7ae81ba0989 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.assignment.AssignProcedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
@@ -38,7 +39,6 @@ import org.apache.zookeeper.KeeperException;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.util.Set;
/**
@@ -81,7 +81,7 @@ public class RecoverMetaProcedure
/**
* This constructor is also used when deserializing from a procedure store; we'll construct one
- * of these then call {@link #deserializeStateData(InputStream)}. Do not use directly.
+ * of these then call #deserializeStateData(InputStream). Do not use directly.
*/
public RecoverMetaProcedure() {
this(null, false);
@@ -183,22 +183,24 @@ public class RecoverMetaProcedure
}
@Override
- protected void serializeStateData(OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.RecoverMetaStateData.Builder state =
MasterProcedureProtos.RecoverMetaStateData.newBuilder().setShouldSplitWal(shouldSplitWal);
if (failedMetaServer != null) {
state.setFailedMetaServer(ProtobufUtil.toServerName(failedMetaServer));
}
state.setReplicaId(replicaId);
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.RecoverMetaStateData state =
- MasterProcedureProtos.RecoverMetaStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.RecoverMetaStateData.class);
this.shouldSplitWal = state.hasShouldSplitWal() && state.getShouldSplitWal();
this.failedMetaServer = state.hasFailedMetaServer() ?
ProtobufUtil.toServerName(state.getFailedMetaServer()) : null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
index 4930396994a..754bb1dd158 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -45,6 +43,7 @@ import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.MetricsSnapshot;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -229,8 +228,9 @@ public class RestoreSnapshotProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.RestoreSnapshotStateData.Builder restoreSnapshotMsg =
MasterProcedureProtos.RestoreSnapshotStateData.newBuilder()
@@ -267,15 +267,16 @@ public class RestoreSnapshotProcedure
restoreSnapshotMsg.addParentToChildRegionsPairList (parentToChildrenPair);
}
}
- restoreSnapshotMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(restoreSnapshotMsg.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.RestoreSnapshotStateData restoreSnapshotMsg =
- MasterProcedureProtos.RestoreSnapshotStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.RestoreSnapshotStateData.class);
setUser(MasterProcedureUtil.toUserInfo(restoreSnapshotMsg.getUserInfo()));
snapshot = restoreSnapshotMsg.getSnapshot();
modifiedTableDescriptor =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index c7af53fd0e6..9307c45b5dc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hbase.master.assignment.AssignProcedure;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.master.assignment.RegionTransitionProcedure;
import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
@@ -96,7 +96,7 @@ implements ServerProcedureInterface {
/**
* Used when deserializing from a procedure store; we'll construct one of these then call
- * {@link #deserializeStateData(InputStream)}. Do not use directly.
+ * #deserializeStateData(InputStream). Do not use directly.
*/
public ServerCrashProcedure() {
super();
@@ -285,8 +285,9 @@ implements ServerProcedureInterface {
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.ServerCrashStateData.Builder state =
MasterProcedureProtos.ServerCrashStateData.newBuilder().
@@ -298,15 +299,16 @@ implements ServerProcedureInterface {
state.addRegionsOnCrashedServer(HRegionInfo.convert(hri));
}
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.ServerCrashStateData state =
- MasterProcedureProtos.ServerCrashStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.ServerCrashStateData.class);
this.serverName = ProtobufUtil.toServerName(state.getServerName());
this.carryingMeta = state.hasCarryingMeta()? state.getCarryingMeta(): false;
// shouldSplitWAL has a default over in pb so this invocation will always work.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
index 506c67dc8de..8f21298d2d4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.hbase.master.procedure;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -35,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -209,8 +208,9 @@ public class TruncateTableProcedure
}
@Override
- public void serializeStateData(final OutputStream stream) throws IOException {
- super.serializeStateData(stream);
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.serializeStateData(serializer);
MasterProcedureProtos.TruncateTableStateData.Builder state =
MasterProcedureProtos.TruncateTableStateData.newBuilder()
@@ -226,15 +226,16 @@ public class TruncateTableProcedure
state.addRegionInfo(HRegionInfo.convert(hri));
}
}
- state.build().writeDelimitedTo(stream);
+ serializer.serialize(state.build());
}
@Override
- public void deserializeStateData(final InputStream stream) throws IOException {
- super.deserializeStateData(stream);
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ super.deserializeStateData(serializer);
MasterProcedureProtos.TruncateTableStateData state =
- MasterProcedureProtos.TruncateTableStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(MasterProcedureProtos.TruncateTableStateData.class);
setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
if (state.hasTableSchema()) {
tableDescriptor = ProtobufUtil.toTableDescriptor(state.getTableSchema());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 979a3512b45..81bcc0c27fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -27,14 +27,14 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -45,9 +45,9 @@ import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.MetricsSnapshot;
import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
@@ -114,7 +114,7 @@ public abstract class TakeSnapshotHandler extends EventHandler implements Snapsh
this.snapshotManifest = SnapshotManifest.create(conf, fs, workingDir, snapshot, monitor);
this.tableLock = master.getLockManager().createMasterLock(
- snapshotTable, LockProcedure.LockType.EXCLUSIVE,
+ snapshotTable, LockType.EXCLUSIVE,
this.getClass().getName() + ": take snapshot " + snapshot.getName());
// prepare the verify
@@ -134,6 +134,7 @@ public abstract class TakeSnapshotHandler extends EventHandler implements Snapsh
return htd;
}
+ @Override
public TakeSnapshotHandler prepare() throws Exception {
super.prepare();
// after this, you should ensure to release this lock in case of exceptions
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 1e63b1306c5..2229bd2d80f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
@@ -78,9 +77,10 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.locking.LockProcedure;
-import org.apache.hadoop.hbase.master.locking.LockProcedure.LockType;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.net.Address;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -1208,31 +1208,32 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
}
@Override
- public void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ public void preGetProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
throws IOException {
- // We are delegating the authorization check to postListProcedures as we don't have
+ // We are delegating the authorization check to postGetProcedures as we don't have
// any concrete set of procedures to work with
}
@Override
- public void postListProcedures(
+ public void postGetProcedures(
ObserverContext<MasterCoprocessorEnvironment> ctx,
- List<ProcedureInfo> procInfoList) throws IOException {
- if (procInfoList.isEmpty()) {
+ List<Procedure<?>> procList) throws IOException {
+ if (procList.isEmpty()) {
return;
}
// Retains only those which passes authorization checks, as the checks weren't done as part
- // of preListProcedures.
- Iterator<ProcedureInfo> itr = procInfoList.iterator();
+ // of preGetProcedures.
+ Iterator<Procedure<?>> itr = procList.iterator();
User user = getActiveUser(ctx);
while (itr.hasNext()) {
- ProcedureInfo procInfo = itr.next();
+ Procedure<?> proc = itr.next();
try {
- if (!ProcedureInfo.isProcedureOwner(procInfo, user)) {
+ String owner = proc.getOwner();
+ if (owner == null || !owner.equals(user.getShortName())) {
// If the user is not the procedure owner, then we should further probe whether
// he can see the procedure.
- requirePermission(user, "listProcedures", Action.ADMIN);
+ requirePermission(user, "getProcedures", Action.ADMIN);
}
} catch (AccessDeniedException e) {
itr.remove();
@@ -1240,6 +1241,13 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
}
}
+ @Override
+ public void preGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ User user = getActiveUser(ctx);
+ requirePermission(user, "getLocks", Action.ADMIN);
+ }
+
@Override
public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
ServerName srcServer, ServerName destServer) throws IOException {
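
Note on the AccessController hunks: with ProcedureInfo gone, postGetProcedures
filters the live Procedure<?> instances directly, and the former
ProcedureInfo.isProcedureOwner(...) test is inlined as a null-safe comparison
against the caller's short name. Factored out as a hypothetical helper (not
part of the patch), the check amounts to:

  // Hypothetical helper mirroring the inlined ownership test above.
  private static boolean isProcedureOwner(Procedure<?> proc, User user) {
    String owner = proc.getOwner();
    // A procedure without a recorded owner never matches the caller.
    return owner != null && owner.equals(user.getShortName());
  }
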
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index ce62fa1fa62..63a41cc2d84 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -29,14 +29,16 @@
import="org.apache.hadoop.hbase.HBaseConfiguration"
import="org.apache.hadoop.hbase.master.HMaster"
import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
- import="org.apache.hadoop.hbase.ProcedureInfo"
- import="org.apache.hadoop.hbase.procedure2.LockInfo"
+ import="org.apache.hadoop.hbase.master.procedure.ProcedureDescriber"
+ import="org.apache.hadoop.hbase.procedure2.LockedResource"
import="org.apache.hadoop.hbase.procedure2.Procedure"
import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
+ import="org.apache.hadoop.hbase.procedure2.ProcedureUtil"
import="org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFile"
import="org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore"
import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-
+ import="org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos"
+ import="org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil"
%>
<%
HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
@@ -48,7 +50,7 @@
long millisFromLastRoll = walStore.getMillisFromLastRoll();
ArrayList<ProcedureWALFile> procedureWALFiles = walStore.getActiveLogs();
Set<ProcedureWALFile> corruptedWALFiles = walStore.getCorruptedLogs();
- List<Procedure> procedures = procExecutor.listProcedures();
+ List<Procedure<?>> procedures = procExecutor.getProcedures();
Collections.sort(procedures, new Comparator<Procedure>() {
@Override
public int compare(Procedure lhs, Procedure rhs) {
@@ -58,7 +60,7 @@
}
});
- List<LockInfo> locks = master.listLocks();
+ List<LockedResource> lockedResources = master.getLocks();
%>
@@ -118,17 +120,19 @@