diff --git a/bin/hbase b/bin/hbase
index 218ba16fce3..79cf4903693 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -118,6 +118,7 @@ if [ $# = 0 ]; then
echo " rowcounter Run RowCounter tool"
echo " cellcounter Run CellCounter tool"
echo " pre-upgrade Run Pre-Upgrade validator tool"
+ echo " hbtop Run HBTop tool"
echo " CLASSNAME Run the class named CLASSNAME"
exit 1
fi
@@ -221,7 +222,7 @@ if [ "${INTERNAL_CLASSPATH}" != "true" ]; then
done
# If command can use our shaded client, use it
- declare -a commands_in_client_jar=("classpath" "version")
+ declare -a commands_in_client_jar=("classpath" "version" "hbtop")
for c in "${commands_in_client_jar[@]}"; do
if [ "${COMMAND}" = "${c}" ]; then
if [ -n "${HADOOP_IN_PATH}" ] && [ -f "${HADOOP_IN_PATH}" ]; then
@@ -628,6 +629,24 @@ elif [ "$COMMAND" = "pre-upgrade" ] ; then
CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
elif [ "$COMMAND" = "completebulkload" ] ; then
CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
+elif [ "$COMMAND" = "hbtop" ] ; then
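+  # hbtop lives in the hbase-hbtop module; when the shaded client jar is in use, add that module's
+  # jar and its commons-lang3 dependency to the classpath explicitly so they are available at runtime.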
+ CLASS='org.apache.hadoop.hbase.hbtop.HBTop'
+ if [ -n "${shaded_jar}" ] ; then
+ for f in "${HBASE_HOME}"/lib/hbase-hbtop*.jar; do
+ if [ -f "${f}" ]; then
+ CLASSPATH="${CLASSPATH}:${f}"
+ break
+ fi
+ done
+ for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
+ if [ -f "${f}" ]; then
+ CLASSPATH="${CLASSPATH}:${f}"
+ break
+ fi
+ done
+ fi
+
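+  # Use hbtop's dedicated log4j configuration so that logging stays on stderr at WARN and does not
+  # interfere with the full-screen display.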
+ HBASE_OPTS="${HBASE_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties"
else
CLASS=$COMMAND
fi
diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties
new file mode 100644
index 00000000000..831ee18e70a
--- /dev/null
+++ b/conf/log4j-hbtop.properties
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+log4j.rootLogger=WARN,console
+log4j.threshold=WARN
+
+# console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
+
+# ZooKeeper will still put stuff at WARN
+log4j.logger.org.apache.zookeeper=ERROR
\ No newline at end of file
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 1cadfa9bb39..0920949f623 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -305,6 +305,10 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-zookeeper</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hbtop</artifactId>
+    </dependency>
     <dependency>
       <groupId>jline</groupId>
       <artifactId>jline</artifactId>
diff --git a/hbase-hbtop/README.md b/hbase-hbtop/README.md
new file mode 100644
index 00000000000..41c2357761b
--- /dev/null
+++ b/hbase-hbtop/README.md
@@ -0,0 +1,248 @@
+
+
+# hbtop
+
+## Overview
+
+`hbtop` is a real-time monitoring tool for HBase, modeled after the Unix `top` command.
+It can display summary information as well as metrics per Region/Namespace/Table/RegionServer.
+You can sort the metrics by a selected field and filter them so that only the metrics you are interested in are shown.
+Also, with the drill-down feature, you can find hot regions easily in a top-down manner.
+
+## Usage
+
+You can run hbtop with the following command:
+
+```
+$ hbase hbtop
+```
+
+In this case, hbtop connects using the values of `hbase.client.zookeeper.quorum` and `zookeeper.znode.parent` from the `hbase-site.xml` on the classpath, or their default values if those properties are not set.
+
+Or, you can specify your own zookeeper quorum and znode parent as follows:
+
+```
+$ hbase hbtop -Dhbase.client.zookeeper.quorum=<zookeeper quorum> -Dzookeeper.znode.parent=<znode parent>
+```
+
+![Top screen](img/top_screen.gif "Top screen")
+
+The top screen consists of a summary part and a metrics part.
+In the summary part, you can see `HBase Version`, `Cluster ID`, `The number of region servers`, `Region count`, `Average Cluster Load` and `Aggregated Request/s`.
+In the metrics part, you can see metrics per Region/Namespace/Table/RegionServer depending on the selected mode.
+The top screen is refreshed at a regular interval, 3 seconds by default.
+
+### Scrolling metric records
+
+You can scroll the metric records in the metrics part.
+
+![Scrolling metric records](img/scrolling_metric_records.gif "Scrolling metric records")
+
+### Command line arguments
+
+| Argument | Description |
+|---|---|
+| -d,--delay <arg> | The refresh delay (in seconds); default is 3 seconds |
+| -h,--help | Print usage; for help while the tool is running press the `h` key |
+| -m,--mode <arg> | The mode; `n` (Namespace)\|`t` (Table)\|`r` (Region)\|`s` (RegionServer); default is `r` (Region) |
+
+### Modes
+
+There are the following 4 modes in hbtop:
+
+| Mode | Description |
+|---|---|
+| Region | Showing metric records per region |
+| Namespace | Showing metric records per namespace |
+| Table | Showing metric records per table |
+| RegionServer | Showing metric records per region server |
+
+#### Region mode
+
+In Region mode, the default sort field is `#REQ/S`.
+
+The fields in this mode are as follows:
+
+| Field | Description | Displayed by default |
+|---|---|---|
+| RNAME | Region Name | false |
+| NAMESPACE | Namespace Name | true |
+| TABLE | Table Name | true |
+| SCODE | Start Code | false |
+| REPID | Replica ID | false |
+| REGION | Encoded Region Name | true |
+| RS | Short Region Server Name | true |
+| LRS | Long Region Server Name | false |
+| #REQ/S | Request Count per second | true |
+| #READ/S | Read Request Count per second | true |
+| #FREAD/S | Filtered Read Request Count per second | true |
+| #WRITE/S | Write Request Count per second | true |
+| SF | StoreFile Size | true |
+| USF | Uncompressed StoreFile Size | false |
+| #SF | Number of StoreFiles | true |
+| MEMSTORE | MemStore Size | true |
+| LOCALITY | Block Locality | true |
+| SKEY | Start Key | false |
+| #COMPingCELL | Compacting Cell Count | false |
+| #COMPedCELL | Compacted Cell Count | false |
+| %COMP | Compaction Progress | false |
+| LASTMCOMP | Last Major Compaction Time | false |
+
+#### Namespace mode
+
+In Namespace mode, the default sort field is `#REQ/S`.
+
+The fields in this mode are as follows:
+
+| Field | Description | Displayed by default |
+|---|---|---|
+| NAMESPACE | Namespace Name | true |
+| #REGION | Region Count | true |
+| #REQ/S | Request Count per second | true |
+| #READ/S | Read Request Count per second | true |
+| #FREAD/S | Filtered Read Request Count per second | true |
+| #WRITE/S | Write Request Count per second | true |
+| SF | StoreFile Size | true |
+| USF | Uncompressed StoreFile Size | false |
+| #SF | Number of StoreFiles | true |
+| MEMSTORE | MemStore Size | true |
+
+#### Table mode
+
+In Table mode, the default sort field is `#REQ/S`.
+
+The fields in this mode are as follows:
+
+| Field | Description | Displayed by default |
+|---|---|---|
+| NAMESPACE | Namespace Name | true |
+| TABLE | Table Name | true |
+| #REGION | Region Count | true |
+| #REQ/S | Request Count per second | true |
+| #READ/S | Read Request Count per second | true |
+| #FREAD/S | Filtered Read Request Count per second | true |
+| #WRITE/S | Write Request Count per second | true |
+| SF | StoreFile Size | true |
+| USF | Uncompressed StoreFile Size | false |
+| #SF | Number of StoreFiles | true |
+| MEMSTORE | MemStore Size | true |
+
+#### RegionServer mode
+
+In RegionServer mode, the default sort field is `#REQ/S`.
+
+The fields in this mode are as follows:
+
+| Field | Description | Displayed by default |
+|---|---|---|
+| RS | Short Region Server Name | true |
+| LRS | Long Region Server Name | false |
+| #REGION | Region Count | true |
+| #REQ/S | Request Count per second | true |
+| #READ/S | Read Request Count per second | true |
+| #FREAD/S | Filtered Read Request Count per second | true |
+| #WRITE/S | Write Request Count per second | true |
+| SF | StoreFile Size | true |
+| USF | Uncompressed StoreFile Size | false |
+| #SF | Number of StoreFiles | true |
+| MEMSTORE | MemStore Size | true |
+| UHEAP | Used Heap Size | true |
+| MHEAP | Max Heap Size | true |
+
+### Changing mode
+
+You can change the mode by pressing the `m` key in the top screen.
+
+![Changing mode](img/changing_mode.gif "Changing mode")
+
+### Changing the refresh delay
+
+You can change the refresh delay by pressing the `d` key in the top screen.
+
+![Changing the refresh delay](img/changing_refresh_delay.gif "Changing the refresh delay")
+
+### Changing the displayed fields
+
+You can switch to the field screen by pressing the `f` key in the top screen. In the field screen, you can toggle whether a field is displayed by choosing it and pressing the `d` key or the `space` key.
+
+![Changing the displayed fields](img/changing_displayed_fields.gif "Changing the displayed fields")
+
+### Changing the sort field
+
+You can switch to the field screen by pressing the `f` key in the top screen. In the field screen, you can change the sort field by choosing a field and pressing the `s` key. You can also toggle the sort order between ascending and descending by pressing the `R` key.
+
+![Changing the sort field](img/changing_sort_field.gif "Changing the sort field")
+
+
+### Changing the order of the fields
+
+You can switch to the field screen by pressing the `f` key in the top screen. In the field screen, you can change the order of the fields.
+
+![Changing the order of the fields](img/changing_order_of_fields.gif "Changing the order of the fields")
+
+### Filters
+
+You can filter the metric records with the filter feature. You can add filters by pressing the `o` key (case insensitive) or the `O` key (case sensitive).
+
+![Adding filters](img/adding_filters.gif "Adding filters")
+
+The syntax is as follows:
+```
+<Field><Operator><Value>
+```
+
+For example, we can add filters like the following:
+```
+NAMESPACE==default
+#REQ/S>1000
+```
+
+The operators you can specify are as follows:
+
+| Operator | Description |
+|---|---|
+| = | Partial match |
+| == | Exact match |
+| > | Greater than |
+| >= | Greater than or equal to |
+| < | Less than |
+| <= | Less than or equal to |
+
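+Under the hood, a filter string like the examples above is parsed into the `RecordFilter` class added in this patch. The following sketch is for illustration only (the `FilterExample` wrapper class is hypothetical and not part of the tool); it shows how such a filter is parsed and applied to a `Record`:
+
+```java
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+
+public class FilterExample {
+  public static void main(String[] args) {
+    // Parse the same filter string you would type at the filter prompt ("o"/"O").
+    RecordFilter filter = RecordFilter.parse("NAMESPACE==default", true /* ignore case */);
+
+    // Build a minimal record that only carries the NAMESPACE field.
+    Record record = Record.builder().put(Field.NAMESPACE, "default").build();
+
+    // Prints "true": the record's NAMESPACE is an exact match for "default".
+    System.out.println(filter.execute(record));
+  }
+}
+```
+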
+You can see the current filters by pressing the `^o` key and clear them by pressing the `=` key.
+
+![Showing and clearing filters](img/showing_and_clearing_filters.gif "Showing and clearing filters")
+
+### Drilling down
+
+You can drill down into a metric record by choosing the record you want to inspect and pressing the `i` key in the top screen. With this feature, you can find hot regions easily in a top-down manner.
+
+![Drilling down](img/driling_down.gif "Drilling down")
+
+### Help screen
+
+You can see the help screen by pressing the `h` key in the top screen.
+
+![Help screen](img/help_screen.gif "Help screen")
+
+## Others
+
+### How hbtop gets the metrics data
+
+hbtop gets its metrics from the `ClusterMetrics` object returned by a call to `Admin#getClusterMetrics()` against the active HMaster. To add new metrics to hbtop, they will first need to be exposed via `ClusterMetrics`.
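+
+The sketch below is not part of hbtop itself; it is a minimal illustration, assuming a reachable cluster and the standard client API, of how the same `ClusterMetrics` data can be fetched and how it maps to the summary part of the top screen:
+
+```java
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+
+public class ClusterMetricsExample {
+  public static void main(String[] args) throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    try (Connection connection = ConnectionFactory.createConnection(conf);
+        Admin admin = connection.getAdmin()) {
+      ClusterMetrics metrics = admin.getClusterMetrics();
+      // These values correspond to the summary part of the top screen.
+      System.out.println("HBase Version: " + metrics.getHBaseVersion());
+      System.out.println("Cluster ID: " + metrics.getClusterId());
+      System.out.println("RegionServers: " + metrics.getLiveServerMetrics().size());
+      System.out.println("Region count: " + metrics.getRegionCount());
+      System.out.println("Average load: " + metrics.getAverageLoad());
+    }
+  }
+}
+```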
diff --git a/hbase-hbtop/img/adding_filters.gif b/hbase-hbtop/img/adding_filters.gif
new file mode 100644
index 00000000000..4c33d530666
Binary files /dev/null and b/hbase-hbtop/img/adding_filters.gif differ
diff --git a/hbase-hbtop/img/changing_displayed_fields.gif b/hbase-hbtop/img/changing_displayed_fields.gif
new file mode 100644
index 00000000000..5d91a096627
Binary files /dev/null and b/hbase-hbtop/img/changing_displayed_fields.gif differ
diff --git a/hbase-hbtop/img/changing_mode.gif b/hbase-hbtop/img/changing_mode.gif
new file mode 100644
index 00000000000..bf1cd5f5f6c
Binary files /dev/null and b/hbase-hbtop/img/changing_mode.gif differ
diff --git a/hbase-hbtop/img/changing_order_of_fields.gif b/hbase-hbtop/img/changing_order_of_fields.gif
new file mode 100644
index 00000000000..53b56610ca9
Binary files /dev/null and b/hbase-hbtop/img/changing_order_of_fields.gif differ
diff --git a/hbase-hbtop/img/changing_refresh_delay.gif b/hbase-hbtop/img/changing_refresh_delay.gif
new file mode 100644
index 00000000000..e22b7eba6ad
Binary files /dev/null and b/hbase-hbtop/img/changing_refresh_delay.gif differ
diff --git a/hbase-hbtop/img/changing_sort_field.gif b/hbase-hbtop/img/changing_sort_field.gif
new file mode 100644
index 00000000000..f55f19654ab
Binary files /dev/null and b/hbase-hbtop/img/changing_sort_field.gif differ
diff --git a/hbase-hbtop/img/driling_down.gif b/hbase-hbtop/img/driling_down.gif
new file mode 100644
index 00000000000..1736bcdf654
Binary files /dev/null and b/hbase-hbtop/img/driling_down.gif differ
diff --git a/hbase-hbtop/img/help_screen.gif b/hbase-hbtop/img/help_screen.gif
new file mode 100644
index 00000000000..13313f9ce7c
Binary files /dev/null and b/hbase-hbtop/img/help_screen.gif differ
diff --git a/hbase-hbtop/img/scrolling_metric_records.gif b/hbase-hbtop/img/scrolling_metric_records.gif
new file mode 100644
index 00000000000..4a41706274e
Binary files /dev/null and b/hbase-hbtop/img/scrolling_metric_records.gif differ
diff --git a/hbase-hbtop/img/showing_and_clearing_filters.gif b/hbase-hbtop/img/showing_and_clearing_filters.gif
new file mode 100644
index 00000000000..3e5342c1177
Binary files /dev/null and b/hbase-hbtop/img/showing_and_clearing_filters.gif differ
diff --git a/hbase-hbtop/img/top_screen.gif b/hbase-hbtop/img/top_screen.gif
new file mode 100644
index 00000000000..0eca8de2000
Binary files /dev/null and b/hbase-hbtop/img/top_screen.gif differ
diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml
new file mode 100644
index 00000000000..73f98c0a970
--- /dev/null
+++ b/hbase-hbtop/pom.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0"?>
+<!--
+  /**
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-build-configuration</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
+  <artifactId>hbase-hbtop</artifactId>
+  <name>Apache HBase - HBTop</name>
+  <description>A real-time monitoring tool for HBase like Unix's top command</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-shaded-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.github.stephenc.findbugs</groupId>
+      <artifactId>findbugs-annotations</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java
new file mode 100644
index 00000000000..d657a370651
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import java.util.Objects;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
+
+
+/**
+ * A real-time monitoring tool for HBase like Unix top command.
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+public class HBTop extends Configured implements Tool {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(HBTop.class);
+
+ public HBTop() {
+ this(HBaseConfiguration.create());
+ }
+
+ public HBTop(Configuration conf) {
+ super(Objects.requireNonNull(conf));
+ }
+
+ @Override
+ public int run(String[] args) throws Exception {
+ long initialRefreshDelay = 3 * 1000;
+ Mode initialMode = Mode.REGION;
+ try {
+ // Command line options
+ Options opts = new Options();
+ opts.addOption("h", "help", false,
+ "Print usage; for help while the tool is running press 'h'");
+ opts.addOption("d", "delay", true,
+ "The refresh delay (in seconds); default is 3 seconds");
+ opts.addOption("m", "mode", true,
+ "The mode; n (Namespace)|t (Table)|r (Region)|s (RegionServer)"
+ + ", default is r (Region)");
+
+ CommandLine commandLine = new DefaultParser().parse(opts, args);
+
+ if (commandLine.hasOption("help")) {
+ printUsage(opts);
+ return 0;
+ }
+
+ if (commandLine.hasOption("delay")) {
+ int delay = 0;
+ try {
+ delay = Integer.parseInt(commandLine.getOptionValue("delay"));
+ } catch (NumberFormatException ignored) {
+ }
+
+ if (delay < 1) {
+ LOGGER.warn("Delay set too low or invalid, using default");
+ } else {
+ initialRefreshDelay = delay * 1000;
+ }
+ }
+
+ if (commandLine.hasOption("mode")) {
+ String mode = commandLine.getOptionValue("mode");
+ switch (mode) {
+ case "n":
+ initialMode = Mode.NAMESPACE;
+ break;
+
+ case "t":
+ initialMode = Mode.TABLE;
+ break;
+
+ case "r":
+ initialMode = Mode.REGION;
+ break;
+
+ case "s":
+ initialMode = Mode.REGION_SERVER;
+ break;
+
+ default:
+ LOGGER.warn("Mode set invalid, using default");
+ break;
+ }
+ }
+ } catch (Exception e) {
+ LOGGER.error("Unable to parse options", e);
+ return 1;
+ }
+
+ try (Screen screen = new Screen(getConf(), initialRefreshDelay, initialMode)) {
+ screen.run();
+ }
+
+ return 0;
+ }
+
+ private void printUsage(Options opts) {
+ new HelpFormatter().printHelp("hbase hbtop [opts] [-D<property=value>]*", opts);
+ System.out.println("");
+ System.out.println(" Note: -D properties will be applied to the conf used.");
+ System.out.println(" For example:");
+ System.out.println(" -Dhbase.client.zookeeper.quorum=<zookeeper quorum>");
+ System.out.println(" -Dzookeeper.znode.parent=<znode parent>");
+ System.out.println("");
+ }
+
+ public static void main(String[] args) throws Exception {
+ int res = ToolRunner.run(new HBTop(), args);
+ System.exit(res);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java
new file mode 100644
index 00000000000..577172a38cb
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Stream;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldValue;
+import org.apache.hadoop.hbase.hbtop.field.FieldValueType;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
+
+/**
+ * Represents a record of the metrics in the top screen.
+ */
+@InterfaceAudience.Private
+public final class Record implements Map<Field, FieldValue> {
+
+ private final ImmutableMap<Field, FieldValue> values;
+
+ public final static class Entry extends AbstractMap.SimpleImmutableEntry<Field, FieldValue> {
+ private Entry(Field key, FieldValue value) {
+ super(key, value);
+ }
+ }
+
+ public final static class Builder {
+
+ private final ImmutableMap.Builder<Field, FieldValue> builder;
+
+ private Builder() {
+ builder = ImmutableMap.builder();
+ }
+
+ public Builder put(Field key, Object value) {
+ builder.put(key, key.newValue(value));
+ return this;
+ }
+
+ public Builder put(Field key, FieldValue value) {
+ builder.put(key, value);
+ return this;
+ }
+
+ public Builder put(Entry entry) {
+ builder.put(entry);
+ return this;
+ }
+
+ public Builder putAll(Map<Field, FieldValue> map) {
+ builder.putAll(map);
+ return this;
+ }
+
+ public Record build() {
+ return new Record(builder.build());
+ }
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ public static Entry entry(Field field, Object value) {
+ return new Entry(field, field.newValue(value));
+ }
+
+ public static Entry entry(Field field, FieldValue value) {
+ return new Entry(field, value);
+ }
+
+ public static Record ofEntries(Entry... entries) {
+ return ofEntries(Stream.of(entries));
+ }
+
+ public static Record ofEntries(Stream<Entry> entries) {
+ return entries.collect(Record::builder, Builder::put, (r1, r2) -> {}).build();
+ }
+
+ private Record(ImmutableMap<Field, FieldValue> values) {
+ this.values = values;
+ }
+
+ @Override
+ public int size() {
+ return values.size();
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return values.isEmpty();
+ }
+
+ @Override
+ public boolean containsKey(Object key) {
+ return values.containsKey(key);
+ }
+
+ @Override
+ public boolean containsValue(Object value) {
+ return values.containsValue(value);
+ }
+
+ @Override
+ public FieldValue get(Object key) {
+ return values.get(key);
+ }
+
+ @Override
+ public FieldValue put(Field key, FieldValue value) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public FieldValue remove(Object key) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void putAll(@NonNull Map<? extends Field, ? extends FieldValue> m) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void clear() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ @NonNull
+ public Set<Field> keySet() {
+ return values.keySet();
+ }
+
+ @Override
+ @NonNull
+ public Collection<FieldValue> values() {
+ return values.values();
+ }
+
+ @Override
+ @NonNull
+ public Set<Map.Entry<Field, FieldValue>> entrySet() {
+ return values.entrySet();
+ }
+
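+ /**
+  * Merges this record with another record field by field: string-typed fields keep this record's
+  * value, while all other field types are summed via {@link FieldValue#plus(FieldValue)}.
+  */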
+ public Record combine(Record o) {
+ return ofEntries(values.keySet().stream()
+ .map(k -> {
+ if (k.getFieldValueType() == FieldValueType.STRING) {
+ return entry(k, values.get(k));
+ }
+ return entry(k, values.get(k).plus(o.values.get(k)));
+ }));
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/RecordFilter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/RecordFilter.java
new file mode 100644
index 00000000000..aaef965c4e9
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/RecordFilter.java
@@ -0,0 +1,336 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldValue;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a filter that's filtering the metric {@link Record}s.
+ */
+@InterfaceAudience.Private
+public final class RecordFilter {
+
+ private enum Operator {
+ EQUAL("="),
+ DOUBLE_EQUALS("=="),
+ GREATER(">"),
+ GREATER_OR_EQUAL(">="),
+ LESS("<"),
+ LESS_OR_EQUAL("<=");
+
+ private final String operator;
+
+ Operator(String operator) {
+ this.operator = operator;
+ }
+
+ @Override
+ public String toString() {
+ return operator;
+ }
+ }
+
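+ /**
+  * Parses a filter string such as {@code NAMESPACE==default} or {@code !TABLE=meta}; a leading
+  * {@code !} negates the filter. Returns {@code null} if the string cannot be parsed.
+  */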
+ public static RecordFilter parse(String filterString, boolean ignoreCase) {
+ return parse(filterString, Arrays.asList(Field.values()), ignoreCase);
+ }
+
+ public static RecordFilter parse(String filterString, List<Field> fields, boolean ignoreCase) {
+ int index = 0;
+
+ boolean not = isNot(filterString);
+ if (not) {
+ index += 1;
+ }
+
+ StringBuilder fieldString = new StringBuilder();
+ while (filterString.length() > index && filterString.charAt(index) != '<'
+ && filterString.charAt(index) != '>' && filterString.charAt(index) != '=') {
+ fieldString.append(filterString.charAt(index++));
+ }
+
+ if (fieldString.length() == 0 || filterString.length() == index) {
+ return null;
+ }
+
+ Field field = getField(fields, fieldString.toString());
+ if (field == null) {
+ return null;
+ }
+
+ StringBuilder operatorString = new StringBuilder();
+ while (filterString.length() > index && (filterString.charAt(index) == '<' ||
+ filterString.charAt(index) == '>' || filterString.charAt(index) == '=')) {
+ operatorString.append(filterString.charAt(index++));
+ }
+
+ Operator operator = getOperator(operatorString.toString());
+ if (operator == null) {
+ return null;
+ }
+
+ String value = filterString.substring(index);
+ FieldValue fieldValue = getFieldValue(field, value);
+ if (fieldValue == null) {
+ return null;
+ }
+
+ return new RecordFilter(ignoreCase, not, field, operator, fieldValue);
+ }
+
+ private static FieldValue getFieldValue(Field field, String value) {
+ try {
+ return field.newValue(value);
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+ private static boolean isNot(String filterString) {
+ return filterString.startsWith("!");
+ }
+
+ private static Field getField(List<Field> fields, String fieldString) {
+ for (Field f : fields) {
+ if (f.getHeader().equals(fieldString)) {
+ return f;
+ }
+ }
+ return null;
+ }
+
+ private static Operator getOperator(String operatorString) {
+ for (Operator o : Operator.values()) {
+ if (operatorString.equals(o.toString())) {
+ return o;
+ }
+ }
+ return null;
+ }
+
+ private final boolean ignoreCase;
+ private final boolean not;
+ private final Field field;
+ private final Operator operator;
+ private final FieldValue value;
+
+ private RecordFilter(boolean ignoreCase, boolean not, Field field, Operator operator,
+ FieldValue value) {
+ this.ignoreCase = ignoreCase;
+ this.not = not;
+ this.field = Objects.requireNonNull(field);
+ this.operator = Objects.requireNonNull(operator);
+ this.value = Objects.requireNonNull(value);
+ }
+
+ public boolean execute(Record record) {
+ FieldValue fieldValue = record.get(field);
+ if (fieldValue == null) {
+ return false;
+ }
+
+ if (operator == Operator.EQUAL) {
+ boolean ret;
+ if (ignoreCase) {
+ ret = fieldValue.asString().toLowerCase().contains(value.asString().toLowerCase());
+ } else {
+ ret = fieldValue.asString().contains(value.asString());
+ }
+ return not != ret;
+ }
+
+ int compare = ignoreCase ?
+ fieldValue.compareToIgnoreCase(value) : fieldValue.compareTo(value);
+
+ boolean ret;
+ switch (operator) {
+ case DOUBLE_EQUALS:
+ ret = compare == 0;
+ break;
+
+ case GREATER:
+ ret = compare > 0;
+ break;
+
+ case GREATER_OR_EQUAL:
+ ret = compare >= 0;
+ break;
+
+ case LESS:
+ ret = compare < 0;
+ break;
+
+ case LESS_OR_EQUAL:
+ ret = compare <= 0;
+ break;
+
+ default:
+ throw new AssertionError();
+ }
+ return not != ret;
+ }
+
+ @Override
+ public String toString() {
+ return (not ? "!" : "") + field.getHeader() + operator + value.asString();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof RecordFilter)) {
+ return false;
+ }
+ RecordFilter filter = (RecordFilter) o;
+ return ignoreCase == filter.ignoreCase && not == filter.not && field == filter.field
+ && operator == filter.operator && value.equals(filter.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ignoreCase, not, field, operator, value);
+ }
+
+ /*
+ * For FilterBuilder
+ */
+ public static FilterBuilder newBuilder(Field field) {
+ return new FilterBuilder(field, false);
+ }
+
+ public static FilterBuilder newBuilder(Field field, boolean ignoreCase) {
+ return new FilterBuilder(field, ignoreCase);
+ }
+
+ public static final class FilterBuilder {
+ private final Field field;
+ private final boolean ignoreCase;
+
+ private FilterBuilder(Field field, boolean ignoreCase) {
+ this.field = Objects.requireNonNull(field);
+ this.ignoreCase = ignoreCase;
+ }
+
+ public RecordFilter equal(FieldValue value) {
+ return newFilter(false, Operator.EQUAL, value);
+ }
+
+ public RecordFilter equal(Object value) {
+ return equal(field.newValue(value));
+ }
+
+ public RecordFilter notEqual(FieldValue value) {
+ return newFilter(true, Operator.EQUAL, value);
+ }
+
+ public RecordFilter notEqual(Object value) {
+ return notEqual(field.newValue(value));
+ }
+
+ public RecordFilter doubleEquals(FieldValue value) {
+ return newFilter(false, Operator.DOUBLE_EQUALS, value);
+ }
+
+ public RecordFilter doubleEquals(Object value) {
+ return doubleEquals(field.newValue(value));
+ }
+
+ public RecordFilter notDoubleEquals(FieldValue value) {
+ return newFilter(true, Operator.DOUBLE_EQUALS, value);
+ }
+
+ public RecordFilter notDoubleEquals(Object value) {
+ return notDoubleEquals(field.newValue(value));
+ }
+
+ public RecordFilter greater(FieldValue value) {
+ return newFilter(false, Operator.GREATER, value);
+ }
+
+ public RecordFilter greater(Object value) {
+ return greater(field.newValue(value));
+ }
+
+ public RecordFilter notGreater(FieldValue value) {
+ return newFilter(true, Operator.GREATER, value);
+ }
+
+ public RecordFilter notGreater(Object value) {
+ return notGreater(field.newValue(value));
+ }
+
+ public RecordFilter greaterOrEqual(FieldValue value) {
+ return newFilter(false, Operator.GREATER_OR_EQUAL, value);
+ }
+
+ public RecordFilter greaterOrEqual(Object value) {
+ return greaterOrEqual(field.newValue(value));
+ }
+
+ public RecordFilter notGreaterOrEqual(FieldValue value) {
+ return newFilter(true, Operator.GREATER_OR_EQUAL, value);
+ }
+
+ public RecordFilter notGreaterOrEqual(Object value) {
+ return notGreaterOrEqual(field.newValue(value));
+ }
+
+ public RecordFilter less(FieldValue value) {
+ return newFilter(false, Operator.LESS, value);
+ }
+
+ public RecordFilter less(Object value) {
+ return less(field.newValue(value));
+ }
+
+ public RecordFilter notLess(FieldValue value) {
+ return newFilter(true, Operator.LESS, value);
+ }
+
+ public RecordFilter notLess(Object value) {
+ return notLess(field.newValue(value));
+ }
+
+ public RecordFilter lessOrEqual(FieldValue value) {
+ return newFilter(false, Operator.LESS_OR_EQUAL, value);
+ }
+
+ public RecordFilter lessOrEqual(Object value) {
+ return lessOrEqual(field.newValue(value));
+ }
+
+ public RecordFilter notLessOrEqual(FieldValue value) {
+ return newFilter(true, Operator.LESS_OR_EQUAL, value);
+ }
+
+ public RecordFilter notLessOrEqual(Object value) {
+ return notLessOrEqual(field.newValue(value));
+ }
+
+ private RecordFilter newFilter(boolean not, Operator operator, FieldValue value) {
+ return new RecordFilter(ignoreCase, not, field, operator, value);
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java
new file mode 100644
index 00000000000..6e5f66f6244
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.field;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents fields that are displayed in the top screen.
+ */
+@InterfaceAudience.Private
+public enum Field {
+ REGION_NAME("RNAME", "Region Name", true, true, FieldValueType.STRING),
+ NAMESPACE("NAMESPACE", "Namespace Name", true, true, FieldValueType.STRING),
+ TABLE("TABLE", "Table Name", true, true, FieldValueType.STRING),
+ START_CODE("SCODE", "Start Code", false, true, FieldValueType.STRING),
+ REPLICA_ID("REPID", "Replica ID", false, false, FieldValueType.STRING),
+ REGION("REGION", "Encoded Region Name", false, true, FieldValueType.STRING),
+ REGION_SERVER("RS", "Short Region Server Name", true, true, FieldValueType.STRING),
+ LONG_REGION_SERVER("LRS", "Long Region Server Name", true, true, FieldValueType.STRING),
+ REQUEST_COUNT_PER_SECOND("#REQ/S", "Request Count per second", false, false,
+ FieldValueType.LONG),
+ READ_REQUEST_COUNT_PER_SECOND("#READ/S", "Read Request Count per second", false, false,
+ FieldValueType.LONG),
+ FILTERED_READ_REQUEST_COUNT_PER_SECOND("#FREAD/S", "Filtered Read Request Count per second",
+ false, false, FieldValueType.LONG),
+ WRITE_REQUEST_COUNT_PER_SECOND("#WRITE/S", "Write Request Count per second", false, false,
+ FieldValueType.LONG),
+ STORE_FILE_SIZE("SF", "StoreFile Size", false, false, FieldValueType.SIZE),
+ UNCOMPRESSED_STORE_FILE_SIZE("USF", "Uncompressed StoreFile Size", false, false,
+ FieldValueType.SIZE),
+ NUM_STORE_FILES("#SF", "Number of StoreFiles", false, false, FieldValueType.INTEGER),
+ MEM_STORE_SIZE("MEMSTORE", "MemStore Size", false, false, FieldValueType.SIZE),
+ LOCALITY("LOCALITY", "Block Locality", false, false, FieldValueType.FLOAT),
+ START_KEY("SKEY", "Start Key", true, true, FieldValueType.STRING),
+ COMPACTING_CELL_COUNT("#COMPingCELL", "Compacting Cell Count", false, false,
+ FieldValueType.LONG),
+ COMPACTED_CELL_COUNT("#COMPedCELL", "Compacted Cell Count", false, false, FieldValueType.LONG),
+ COMPACTION_PROGRESS("%COMP", "Compaction Progress", false, false, FieldValueType.PERCENT),
+ LAST_MAJOR_COMPACTION_TIME("LASTMCOMP", "Last Major Compaction Time", false, true,
+ FieldValueType.STRING),
+ REGION_COUNT("#REGION", "Region Count", false, false, FieldValueType.INTEGER),
+ USED_HEAP_SIZE("UHEAP", "Used Heap Size", false, false, FieldValueType.SIZE),
+ MAX_HEAP_SIZE("MHEAP", "Max Heap Size", false, false, FieldValueType.SIZE);
+
+ private final String header;
+ private final String description;
+ private final boolean autoAdjust;
+ private final boolean leftJustify;
+ private final FieldValueType fieldValueType;
+
+ Field(String header, String description, boolean autoAdjust, boolean leftJustify,
+ FieldValueType fieldValueType) {
+ this.header = Objects.requireNonNull(header);
+ this.description = Objects.requireNonNull(description);
+ this.autoAdjust = autoAdjust;
+ this.leftJustify = leftJustify;
+ this.fieldValueType = Objects.requireNonNull(fieldValueType);
+ }
+
+ public FieldValue newValue(Object value) {
+ return new FieldValue(value, fieldValueType);
+ }
+
+ public String getHeader() {
+ return header;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public boolean isAutoAdjust() {
+ return autoAdjust;
+ }
+
+ public boolean isLeftJustify() {
+ return leftJustify;
+ }
+
+ public FieldValueType getFieldValueType() {
+ return fieldValueType;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java
new file mode 100644
index 00000000000..3f0e5f7ad1d
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.field;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Information about a field.
+ *
+ * This has a {@link Field} itself and additional information (e.g. {@code defaultLength} and
+ * {@code displayByDefault}). This additional information is different between the
+ * {@link org.apache.hadoop.hbase.hbtop.mode.Mode}s even when the field is the same. That's why the
+ * additional information is separated from {@link Field}.
+ */
+@InterfaceAudience.Private
+public class FieldInfo {
+ private final Field field;
+ private final int defaultLength;
+ private final boolean displayByDefault;
+
+ public FieldInfo(Field field, int defaultLength, boolean displayByDefault) {
+ this.field = Objects.requireNonNull(field);
+ this.defaultLength = defaultLength;
+ this.displayByDefault = displayByDefault;
+ }
+
+ public Field getField() {
+ return field;
+ }
+
+ public int getDefaultLength() {
+ return defaultLength;
+ }
+
+ public boolean isDisplayByDefault() {
+ return displayByDefault;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java
new file mode 100644
index 00000000000..6150df93c5c
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java
@@ -0,0 +1,284 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.field;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.util.Objects;
+import org.apache.hadoop.hbase.Size;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a value of a field.
+ *
+ * The type of a value is defined by {@link FieldValueType}.
+ */
+@InterfaceAudience.Private
+public final class FieldValue implements Comparable<FieldValue> {
+
+ private final Object value;
+ private final FieldValueType type;
+
+ FieldValue(Object value, FieldValueType type) {
+ Objects.requireNonNull(value);
+ this.type = Objects.requireNonNull(type);
+
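+ // Validate the raw value against the declared type, parsing String input where necessary.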
+ switch (type) {
+ case STRING:
+ if (value instanceof String) {
+ this.value = value;
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ case INTEGER:
+ if (value instanceof Integer) {
+ this.value = value;
+ break;
+ } else if (value instanceof String) {
+ this.value = Integer.valueOf((String) value);
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ case LONG:
+ if (value instanceof Long) {
+ this.value = value;
+ break;
+ } else if (value instanceof String) {
+ this.value = Long.valueOf((String) value);
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ case FLOAT:
+ if (value instanceof Float) {
+ this.value = value;
+ break;
+ } else if (value instanceof String) {
+ this.value = Float.valueOf((String) value);
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ case SIZE:
+ if (value instanceof Size) {
+ this.value = optimizeSize((Size) value);
+ break;
+ } else if (value instanceof String) {
+ this.value = optimizeSize(parseSizeString((String) value));
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ case PERCENT:
+ if (value instanceof Float) {
+ this.value = value;
+ break;
+ } else if (value instanceof String) {
+ this.value = parsePercentString((String) value);
+ break;
+ }
+ throw new IllegalArgumentException("invalid type");
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
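+ /**
+  * Normalizes a size to the largest unit in which the value is still below 1024, so that sizes
+  * are displayed in a human-readable form (e.g. 2048 KB becomes 2 MB).
+  */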
+ private Size optimizeSize(Size size) {
+ if (size.get(Size.Unit.BYTE) < 1024d) {
+ return size.getUnit() == Size.Unit.BYTE ?
+ size : new Size(size.get(Size.Unit.BYTE), Size.Unit.BYTE);
+ } else if (size.get(Size.Unit.KILOBYTE) < 1024d) {
+ return size.getUnit() == Size.Unit.KILOBYTE ?
+ size : new Size(size.get(Size.Unit.KILOBYTE), Size.Unit.KILOBYTE);
+ } else if (size.get(Size.Unit.MEGABYTE) < 1024d) {
+ return size.getUnit() == Size.Unit.MEGABYTE ?
+ size : new Size(size.get(Size.Unit.MEGABYTE), Size.Unit.MEGABYTE);
+ } else if (size.get(Size.Unit.GIGABYTE) < 1024d) {
+ return size.getUnit() == Size.Unit.GIGABYTE ?
+ size : new Size(size.get(Size.Unit.GIGABYTE), Size.Unit.GIGABYTE);
+ } else if (size.get(Size.Unit.TERABYTE) < 1024d) {
+ return size.getUnit() == Size.Unit.TERABYTE ?
+ size : new Size(size.get(Size.Unit.TERABYTE), Size.Unit.TERABYTE);
+ }
+ return size.getUnit() == Size.Unit.PETABYTE ?
+ size : new Size(size.get(Size.Unit.PETABYTE), Size.Unit.PETABYTE);
+ }
+
+ private Size parseSizeString(String sizeString) {
+ if (sizeString.length() < 3) {
+ throw new IllegalArgumentException("invalid size");
+ }
+
+ String valueString = sizeString.substring(0, sizeString.length() - 2);
+ String unitSimpleName = sizeString.substring(sizeString.length() - 2);
+ return new Size(Double.parseDouble(valueString), convertToUnit(unitSimpleName));
+ }
+
+ private Size.Unit convertToUnit(String unitSimpleName) {
+ for (Size.Unit unit: Size.Unit.values()) {
+ if (unitSimpleName.equals(unit.getSimpleName())) {
+ return unit;
+ }
+ }
+ throw new IllegalArgumentException("invalid size");
+ }
+
+ private Float parsePercentString(String percentString) {
+ if (percentString.endsWith("%")) {
+ percentString = percentString.substring(0, percentString.length() - 1);
+ }
+ return Float.valueOf(percentString);
+ }
+
+ public String asString() {
+ return toString();
+ }
+
+ public int asInt() {
+ return (Integer) value;
+ }
+
+ public long asLong() {
+ return (Long) value;
+ }
+
+ public float asFloat() {
+ return (Float) value;
+ }
+
+ public Size asSize() {
+ return (Size) value;
+ }
+
+ @Override
+ public String toString() {
+ switch (type) {
+ case STRING:
+ case INTEGER:
+ case LONG:
+ case FLOAT:
+ case SIZE:
+ return value.toString();
+
+ case PERCENT:
+ return String.format("%.2f", (Float) value) + "%";
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ public int compareTo(@NonNull FieldValue o) {
+ if (type != o.type) {
+ throw new IllegalArgumentException("invalid type");
+ }
+
+ switch (type) {
+ case STRING:
+ return ((String) value).compareTo((String) o.value);
+
+ case INTEGER:
+ return ((Integer) value).compareTo((Integer) o.value);
+
+ case LONG:
+ return ((Long) value).compareTo((Long) o.value);
+
+ case FLOAT:
+ case PERCENT:
+ return ((Float) value).compareTo((Float) o.value);
+
+ case SIZE:
+ return ((Size) value).compareTo((Size) o.value);
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof FieldValue)) {
+ return false;
+ }
+ FieldValue that = (FieldValue) o;
+ return value.equals(that.value) && type == that.type;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value, type);
+ }
+
+ public FieldValue plus(FieldValue o) {
+ if (type != o.type) {
+ throw new IllegalArgumentException("invalid type");
+ }
+
+ switch (type) {
+ case STRING:
+ return new FieldValue(((String) value).concat((String) o.value), type);
+
+ case INTEGER:
+ return new FieldValue(((Integer) value) + ((Integer) o.value), type);
+
+ case LONG:
+ return new FieldValue(((Long) value) + ((Long) o.value), type);
+
+ case FLOAT:
+ case PERCENT:
+ return new FieldValue(((Float) value) + ((Float) o.value), type);
+
+ case SIZE:
+ Size size = (Size) value;
+ Size oSize = (Size) o.value;
+ Size.Unit unit = size.getUnit();
+ return new FieldValue(new Size(size.get(unit) + oSize.get(unit), unit), type);
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public int compareToIgnoreCase(FieldValue o) {
+ if (type != o.type) {
+ throw new IllegalArgumentException("invalid type");
+ }
+
+ switch (type) {
+ case STRING:
+ return ((String) value).compareToIgnoreCase((String) o.value);
+
+ case INTEGER:
+ case LONG:
+ case FLOAT:
+ case SIZE:
+ case PERCENT:
+ return compareTo(o);
+
+ default:
+ throw new AssertionError();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java
new file mode 100644
index 00000000000..e2edae87b80
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.field;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents the type of a {@link FieldValue}.
+ */
+@InterfaceAudience.Private
+public enum FieldValueType {
+ STRING, INTEGER, LONG, FLOAT, SIZE, PERCENT
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java
new file mode 100644
index 00000000000..de3d582fb9f
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Information about drilling down.
+ *
+ * When drilling down, the tool moves to the next {@link Mode} and applies the given initial {@link RecordFilter}s.
+ */
+@InterfaceAudience.Private
+public class DrillDownInfo {
+ private final Mode nextMode;
+ private final List<RecordFilter> initialFilters;
+
+ public DrillDownInfo(Mode nextMode, List<RecordFilter> initialFilters) {
+ this.nextMode = Objects.requireNonNull(nextMode);
+ this.initialFilters = Collections.unmodifiableList(new ArrayList<>(initialFilters));
+ }
+
+ public Mode getNextMode() {
+ return nextMode;
+ }
+
+ public List<RecordFilter> getInitialFilters() {
+ return initialFilters;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java
new file mode 100644
index 00000000000..1290e6916cb
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.List;
+import java.util.Objects;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a display mode in the top screen.
+ */
+@InterfaceAudience.Private
+public enum Mode {
+ NAMESPACE("Namespace", "Record per Namespace", new NamespaceModeStrategy()),
+ TABLE("Table", "Record per Table", new TableModeStrategy()),
+ REGION("Region", "Record per Region", new RegionModeStrategy()),
+ REGION_SERVER("RegionServer", "Record per RegionServer", new RegionServerModeStrategy());
+
+ private final String header;
+ private final String description;
+ private final ModeStrategy modeStrategy;
+
+ Mode(String header, String description, ModeStrategy modeStrategy) {
+ this.header = Objects.requireNonNull(header);
+ this.description = Objects.requireNonNull(description);
+ this.modeStrategy = Objects.requireNonNull(modeStrategy);
+ }
+
+ public String getHeader() {
+ return header;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public List<Record> getRecords(ClusterMetrics clusterMetrics) {
+ return modeStrategy.getRecords(clusterMetrics);
+ }
+
+ public List<FieldInfo> getFieldInfos() {
+ return modeStrategy.getFieldInfos();
+ }
+
+ public Field getDefaultSortField() {
+ return modeStrategy.getDefaultSortField();
+ }
+
+ @Nullable
+ public DrillDownInfo drillDown(Record currentRecord) {
+ return modeStrategy.drillDown(currentRecord);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java
new file mode 100644
index 00000000000..09fa297e303
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.List;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * An interface for strategy logic for {@link Mode}.
+ */
+@InterfaceAudience.Private
+interface ModeStrategy {
+ List<FieldInfo> getFieldInfos();
+ Field getDefaultSortField();
+ List<Record> getRecords(ClusterMetrics clusterMetrics);
+ @Nullable DrillDownInfo drillDown(Record selectedRecord);
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java
new file mode 100644
index 00000000000..866f57e4ded
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Implementation for {@link ModeStrategy} for Namespace Mode.
+ */
+@InterfaceAudience.Private
+public final class NamespaceModeStrategy implements ModeStrategy {
+
+ private final List<FieldInfo> fieldInfos = Arrays.asList(
+ new FieldInfo(Field.NAMESPACE, 0, true),
+ new FieldInfo(Field.REGION_COUNT, 7, true),
+ new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.STORE_FILE_SIZE, 13, true),
+ new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false),
+ new FieldInfo(Field.NUM_STORE_FILES, 7, true),
+ new FieldInfo(Field.MEM_STORE_SIZE, 11, true)
+ );
+
+ private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy();
+
+ NamespaceModeStrategy(){
+ }
+
+ @Override
+ public List<FieldInfo> getFieldInfos() {
+ return fieldInfos;
+ }
+
+ @Override
+ public Field getDefaultSortField() {
+ return Field.REQUEST_COUNT_PER_SECOND;
+ }
+
+ @Override
+ public List<Record> getRecords(ClusterMetrics clusterMetrics) {
+ // Get records from RegionModeStrategy and add REGION_COUNT field
+ List<Record> records = regionModeStrategy.getRecords(clusterMetrics).stream()
+ .map(record ->
+ Record.ofEntries(fieldInfos.stream()
+ .filter(fi -> record.containsKey(fi.getField()))
+ .map(fi -> Record.entry(fi.getField(), record.get(fi.getField())))))
+ .map(record -> Record.builder().putAll(record).put(Field.REGION_COUNT, 1).build())
+ .collect(Collectors.toList());
+
+ // Aggregation by NAMESPACE field
+ return records.stream()
+ .collect(Collectors.groupingBy(r -> r.get(Field.NAMESPACE).asString()))
+ .entrySet().stream()
+ .flatMap(
+ e -> e.getValue().stream()
+ .reduce(Record::combine)
+ .map(Stream::of)
+ .orElse(Stream.empty()))
+ .collect(Collectors.toList());
+ }
+
+ @Override
+ public DrillDownInfo drillDown(Record selectedRecord) {
+ List<RecordFilter> initialFilters =
+ Collections.singletonList(RecordFilter.newBuilder(Field.NAMESPACE)
+ .doubleEquals(selectedRecord.get(Field.NAMESPACE)));
+ return new DrillDownInfo(Mode.TABLE, initialFilters);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java
new file mode 100644
index 00000000000..e5deda06be1
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.commons.lang3.time.FastDateFormat;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.RegionMetrics;
+import org.apache.hadoop.hbase.ServerMetrics;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Implementation for {@link ModeStrategy} for Region Mode.
+ */
+@InterfaceAudience.Private
+public final class RegionModeStrategy implements ModeStrategy {
+
+ private final List<FieldInfo> fieldInfos = Arrays.asList(
+ new FieldInfo(Field.REGION_NAME, 0, false),
+ new FieldInfo(Field.NAMESPACE, 0, true),
+ new FieldInfo(Field.TABLE, 0, true),
+ new FieldInfo(Field.START_CODE, 13, false),
+ new FieldInfo(Field.REPLICA_ID, 5, false),
+ new FieldInfo(Field.REGION, 32, true),
+ new FieldInfo(Field.REGION_SERVER, 0, true),
+ new FieldInfo(Field.LONG_REGION_SERVER, 0, false),
+ new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.STORE_FILE_SIZE, 10, true),
+ new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 12, false),
+ new FieldInfo(Field.NUM_STORE_FILES, 4, true),
+ new FieldInfo(Field.MEM_STORE_SIZE, 8, true),
+ new FieldInfo(Field.LOCALITY, 8, true),
+ new FieldInfo(Field.START_KEY, 0, false),
+ new FieldInfo(Field.COMPACTING_CELL_COUNT, 12, false),
+ new FieldInfo(Field.COMPACTED_CELL_COUNT, 12, false),
+ new FieldInfo(Field.COMPACTION_PROGRESS, 7, false),
+ new FieldInfo(Field.LAST_MAJOR_COMPACTION_TIME, 19, false)
+ );
+
+ private final Map<String, RequestCountPerSecond> requestCountPerSecondMap = new HashMap<>();
+
+ RegionModeStrategy() {
+ }
+
+ @Override
+ public List<FieldInfo> getFieldInfos() {
+ return fieldInfos;
+ }
+
+ @Override
+ public Field getDefaultSortField() {
+ return Field.REQUEST_COUNT_PER_SECOND;
+ }
+
+ @Override
+ public List<Record> getRecords(ClusterMetrics clusterMetrics) {
+ List<Record> ret = new ArrayList<>();
+ for (ServerMetrics sm : clusterMetrics.getLiveServerMetrics().values()) {
+ long lastReportTimestamp = sm.getLastReportTimestamp();
+ for (RegionMetrics rm : sm.getRegionMetrics().values()) {
+ ret.add(createRecord(sm, rm, lastReportTimestamp));
+ }
+ }
+ return ret;
+ }
+
+ private Record createRecord(ServerMetrics serverMetrics, RegionMetrics regionMetrics,
+ long lastReportTimestamp) {
+
+ Record.Builder builder = Record.builder();
+
+ String regionName = regionMetrics.getNameAsString();
+ builder.put(Field.REGION_NAME, regionName);
+
+ String namespaceName = "";
+ String tableName = "";
+ String region = "";
+ String startKey = "";
+ String startCode = "";
+ String replicaId = "";
+ try {
+ byte[][] elements = RegionInfo.parseRegionName(regionMetrics.getRegionName());
+ TableName tn = TableName.valueOf(elements[0]);
+ namespaceName = tn.getNamespaceAsString();
+ tableName = tn.getQualifierAsString();
+ startKey = Bytes.toStringBinary(elements[1]);
+ startCode = Bytes.toString(elements[2]);
+ replicaId = elements.length == 4 ?
+ Integer.valueOf(Bytes.toString(elements[3])).toString() : "";
+ region = RegionInfo.encodeRegionName(regionMetrics.getRegionName());
+ } catch (IOException ignored) {
+ }
+
+ builder.put(Field.NAMESPACE, namespaceName);
+ builder.put(Field.TABLE, tableName);
+ builder.put(Field.START_CODE, startCode);
+ builder.put(Field.REPLICA_ID, replicaId);
+ builder.put(Field.REGION, region);
+ builder.put(Field.START_KEY, startKey);
+ builder.put(Field.REGION_SERVER, serverMetrics.getServerName().toShortString());
+ builder.put(Field.LONG_REGION_SERVER, serverMetrics.getServerName().getServerName());
+
+ RequestCountPerSecond requestCountPerSecond = requestCountPerSecondMap.get(regionName);
+ if (requestCountPerSecond == null) {
+ requestCountPerSecond = new RequestCountPerSecond();
+ requestCountPerSecondMap.put(regionName, requestCountPerSecond);
+ }
+ requestCountPerSecond.refresh(lastReportTimestamp, regionMetrics.getReadRequestCount(),
+ regionMetrics.getFilteredReadRequestCount(), regionMetrics.getWriteRequestCount());
+
+ builder.put(Field.READ_REQUEST_COUNT_PER_SECOND,
+ requestCountPerSecond.getReadRequestCountPerSecond());
+ builder.put(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND,
+ requestCountPerSecond.getFilteredReadRequestCountPerSecond());
+ builder.put(Field.WRITE_REQUEST_COUNT_PER_SECOND,
+ requestCountPerSecond.getWriteRequestCountPerSecond());
+ builder.put(Field.REQUEST_COUNT_PER_SECOND,
+ requestCountPerSecond.getRequestCountPerSecond());
+
+ builder.put(Field.STORE_FILE_SIZE, regionMetrics.getStoreFileSize());
+ builder.put(Field.UNCOMPRESSED_STORE_FILE_SIZE, regionMetrics.getUncompressedStoreFileSize());
+ builder.put(Field.NUM_STORE_FILES, regionMetrics.getStoreFileCount());
+ builder.put(Field.MEM_STORE_SIZE, regionMetrics.getMemStoreSize());
+ builder.put(Field.LOCALITY, regionMetrics.getDataLocality());
+
+ long compactingCellCount = regionMetrics.getCompactingCellCount();
+ long compactedCellCount = regionMetrics.getCompactedCellCount();
+ float compactionProgress = 0;
+ if (compactedCellCount > 0) {
+ compactionProgress = 100 * ((float) compactedCellCount / compactingCellCount);
+ }
+
+ builder.put(Field.COMPACTING_CELL_COUNT, compactingCellCount);
+ builder.put(Field.COMPACTED_CELL_COUNT, compactedCellCount);
+ builder.put(Field.COMPACTION_PROGRESS, compactionProgress);
+
+ FastDateFormat df = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");
+ long lastMajorCompactionTimestamp = regionMetrics.getLastMajorCompactionTimestamp();
+
+ builder.put(Field.LAST_MAJOR_COMPACTION_TIME,
+ lastMajorCompactionTimestamp == 0 ? "" : df.format(lastMajorCompactionTimestamp));
+
+ return builder.build();
+ }
+
+ @Nullable
+ @Override
+ public DrillDownInfo drillDown(Record selectedRecord) {
+ // do nothing
+ return null;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java
new file mode 100644
index 00000000000..58380f665ae
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.ServerMetrics;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Implementation for {@link ModeStrategy} for RegionServer Mode.
+ */
+@InterfaceAudience.Private
+public final class RegionServerModeStrategy implements ModeStrategy {
+
+ private final List<FieldInfo> fieldInfos = Arrays.asList(
+ new FieldInfo(Field.REGION_SERVER, 0, true),
+ new FieldInfo(Field.LONG_REGION_SERVER, 0, false),
+ new FieldInfo(Field.REGION_COUNT, 7, true),
+ new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.STORE_FILE_SIZE, 13, true),
+ new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false),
+ new FieldInfo(Field.NUM_STORE_FILES, 7, true),
+ new FieldInfo(Field.MEM_STORE_SIZE, 11, true),
+ new FieldInfo(Field.USED_HEAP_SIZE, 11, true),
+ new FieldInfo(Field.MAX_HEAP_SIZE, 11, true)
+ );
+
+ private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy();
+
+ RegionServerModeStrategy(){
+ }
+
+ @Override
+ public List<FieldInfo> getFieldInfos() {
+ return fieldInfos;
+ }
+
+ @Override
+ public Field getDefaultSortField() {
+ return Field.REQUEST_COUNT_PER_SECOND;
+ }
+
+ @Override
+ public List<Record> getRecords(ClusterMetrics clusterMetrics) {
+ // Get records from RegionModeStrategy and add REGION_COUNT field
+ List<Record> records = regionModeStrategy.getRecords(clusterMetrics).stream()
+ .map(record ->
+ Record.ofEntries(fieldInfos.stream()
+ .filter(fi -> record.containsKey(fi.getField()))
+ .map(fi -> Record.entry(fi.getField(), record.get(fi.getField())))))
+ .map(record -> Record.builder().putAll(record).put(Field.REGION_COUNT, 1).build())
+ .collect(Collectors.toList());
+
+ // Aggregation by LONG_REGION_SERVER field
+ Map<String, Record> retMap = records.stream()
+ .collect(Collectors.groupingBy(r -> r.get(Field.LONG_REGION_SERVER).asString()))
+ .entrySet().stream()
+ .flatMap(
+ e -> e.getValue().stream()
+ .reduce(Record::combine)
+ .map(Stream::of)
+ .orElse(Stream.empty()))
+ .collect(Collectors.toMap(r -> r.get(Field.LONG_REGION_SERVER).asString(), r -> r));
+
+ // Add USED_HEAP_SIZE field and MAX_HEAP_SIZE field
+ for (ServerMetrics sm : clusterMetrics.getLiveServerMetrics().values()) {
+ Record record = retMap.get(sm.getServerName().getServerName());
+ if (record == null) {
+ continue;
+ }
+
+ Record newRecord = Record.builder().putAll(record)
+ .put(Field.USED_HEAP_SIZE, sm.getUsedHeapSize())
+ .put(Field.MAX_HEAP_SIZE, sm.getMaxHeapSize()).build();
+
+ retMap.put(sm.getServerName().getServerName(), newRecord);
+ }
+
+ return new ArrayList<>(retMap.values());
+ }
+
+ @Override
+ public DrillDownInfo drillDown(Record selectedRecord) {
+ List<RecordFilter> initialFilters = Collections.singletonList(RecordFilter
+ .newBuilder(Field.REGION_SERVER)
+ .doubleEquals(selectedRecord.get(Field.REGION_SERVER)));
+ return new DrillDownInfo(Mode.REGION, initialFilters);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java
new file mode 100644
index 00000000000..508cf829bc9
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Utility class for calculating request counts per second.
+ */
+@InterfaceAudience.Private
+public class RequestCountPerSecond {
+ private long previousLastReportTimestamp;
+ private long previousReadRequestCount;
+ private long previousFilteredReadRequestCount;
+ private long previousWriteRequestCount;
+ private long readRequestCountPerSecond;
+ private long filteredReadRequestCountPerSecond;
+ private long writeRequestCountPerSecond;
+
+ public void refresh(long lastReportTimestamp, long readRequestCount,
+ long filteredReadRequestCount, long writeRequestCount) {
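+ // The first call only records a baseline; later calls divide each counter's delta by the
+ // elapsed time (in seconds) between region server reports.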
+ if (previousLastReportTimestamp == 0) {
+ previousLastReportTimestamp = lastReportTimestamp;
+ previousReadRequestCount = readRequestCount;
+ previousFilteredReadRequestCount = filteredReadRequestCount;
+ previousWriteRequestCount = writeRequestCount;
+ } else if (previousLastReportTimestamp != lastReportTimestamp) {
+ readRequestCountPerSecond = (readRequestCount - previousReadRequestCount) /
+ ((lastReportTimestamp - previousLastReportTimestamp) / 1000);
+ filteredReadRequestCountPerSecond =
+ (filteredReadRequestCount - previousFilteredReadRequestCount) /
+ ((lastReportTimestamp - previousLastReportTimestamp) / 1000);
+ writeRequestCountPerSecond = (writeRequestCount - previousWriteRequestCount) /
+ ((lastReportTimestamp - previousLastReportTimestamp) / 1000);
+
+ previousLastReportTimestamp = lastReportTimestamp;
+ previousReadRequestCount = readRequestCount;
+ previousFilteredReadRequestCount = filteredReadRequestCount;
+ previousWriteRequestCount = writeRequestCount;
+ }
+ }
+
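+ // A counter can go backwards (e.g. after it is reset), which would yield a negative rate;
+ // the getters below report 0 in that case.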
+ public long getReadRequestCountPerSecond() {
+ return readRequestCountPerSecond < 0 ? 0 : readRequestCountPerSecond;
+ }
+
+ public long getFilteredReadRequestCountPerSecond() {
+ return filteredReadRequestCountPerSecond < 0 ? 0 : filteredReadRequestCountPerSecond;
+ }
+
+ public long getWriteRequestCountPerSecond() {
+ return writeRequestCountPerSecond < 0 ? 0 : writeRequestCountPerSecond;
+ }
+
+ public long getRequestCountPerSecond() {
+ return getReadRequestCountPerSecond() + getWriteRequestCountPerSecond();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java
new file mode 100644
index 00000000000..fb8d66d74e5
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Implementation for {@link ModeStrategy} for Table Mode.
+ */
+@InterfaceAudience.Private
+public final class TableModeStrategy implements ModeStrategy {
+
+ private final List<FieldInfo> fieldInfos = Arrays.asList(
+ new FieldInfo(Field.NAMESPACE, 0, true),
+ new FieldInfo(Field.TABLE, 0, true),
+ new FieldInfo(Field.REGION_COUNT, 7, true),
+ new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true),
+ new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.STORE_FILE_SIZE, 13, true),
+ new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false),
+ new FieldInfo(Field.NUM_STORE_FILES, 7, true),
+ new FieldInfo(Field.MEM_STORE_SIZE, 11, true)
+ );
+
+ private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy();
+
+ TableModeStrategy() {
+ }
+
+ @Override
+ public List<FieldInfo> getFieldInfos() {
+ return fieldInfos;
+ }
+
+ @Override
+ public Field getDefaultSortField() {
+ return Field.REQUEST_COUNT_PER_SECOND;
+ }
+
+ @Override
+ public List<Record> getRecords(ClusterMetrics clusterMetrics) {
+ // Get records from RegionModeStrategy and add REGION_COUNT field
+ List<Record> records = regionModeStrategy.getRecords(clusterMetrics).stream()
+ .map(record ->
+ Record.ofEntries(fieldInfos.stream()
+ .filter(fi -> record.containsKey(fi.getField()))
+ .map(fi -> Record.entry(fi.getField(), record.get(fi.getField())))))
+ .map(record -> Record.builder().putAll(record).put(Field.REGION_COUNT, 1).build())
+ .collect(Collectors.toList());
+
+ // Aggregation by NAMESPACE field and TABLE field
+ return records.stream()
+ .collect(Collectors.groupingBy(r -> {
+ String namespace = r.get(Field.NAMESPACE).asString();
+ String table = r.get(Field.TABLE).asString();
+ return TableName.valueOf(namespace, table);
+ }))
+ .entrySet().stream()
+ .flatMap(
+ e -> e.getValue().stream()
+ .reduce(Record::combine)
+ .map(Stream::of)
+ .orElse(Stream.empty()))
+ .collect(Collectors.toList());
+ }
+
+ @Override
+ public DrillDownInfo drillDown(Record selectedRecord) {
+ List<RecordFilter> initialFilters = Arrays.asList(
+ RecordFilter.newBuilder(Field.NAMESPACE).doubleEquals(selectedRecord.get(Field.NAMESPACE)),
+ RecordFilter.newBuilder(Field.TABLE).doubleEquals(selectedRecord.get(Field.TABLE)));
+ return new DrillDownInfo(Mode.REGION, initialFilters);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java
new file mode 100644
index 00000000000..cb5520e0b5f
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * An abstract base class for {@link ScreenView} that provides common helper methods and default
+ * implementations of the interface methods.
+ */
+@InterfaceAudience.Private
+public abstract class AbstractScreenView implements ScreenView {
+
+ private final Screen screen;
+ private final Terminal terminal;
+
+ public AbstractScreenView(Screen screen, Terminal terminal) {
+ this.screen = Objects.requireNonNull(screen);
+ this.terminal = Objects.requireNonNull(terminal);
+ }
+
+ @Override
+ public void init() {
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ return this;
+ }
+
+ @Override
+ public ScreenView handleTimer() {
+ return this;
+ }
+
+ protected Screen getScreen() {
+ return screen;
+ }
+
+ protected Terminal getTerminal() {
+ return terminal;
+ }
+
+ protected void setTimer(long delay) {
+ screen.setTimer(delay);
+ }
+
+ protected void cancelTimer() {
+ screen.cancelTimer();
+ }
+
+ protected TerminalPrinter getTerminalPrinter(int startRow) {
+ return terminal.getTerminalPrinter(startRow);
+ }
+
+ protected TerminalSize getTerminalSize() {
+ return terminal.getSize();
+ }
+
+ @Nullable
+ protected TerminalSize doResizeIfNecessary() {
+ return terminal.doResizeIfNecessary();
+ }
+
+ public void clearTerminal() {
+ terminal.clear();
+ }
+
+ public void refreshTerminal() {
+ terminal.refresh();
+ }
+
+ public void hideCursor() {
+ terminal.hideCursor();
+ }
+
+ public void setCursorPosition(int column, int row) {
+ terminal.setCursorPosition(column, row);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java
new file mode 100644
index 00000000000..aa951cfa30a
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.impl.TerminalImpl;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * This dispatches key presses and timers to the current {@link ScreenView}.
+ */
+@InterfaceAudience.Private
+public class Screen implements Closeable {
+ private static final Logger LOGGER = LoggerFactory.getLogger(Screen.class);
+ private static final long SLEEP_TIMEOUT_MILLISECONDS = 100;
+
+ private final Connection connection;
+ private final Admin admin;
+ private final Terminal terminal;
+
+ private ScreenView currentScreenView;
+ private Long timerTimestamp;
+
+ public Screen(Configuration conf, long initialRefreshDelay, Mode initialMode)
+ throws IOException {
+ connection = ConnectionFactory.createConnection(conf);
+ admin = connection.getAdmin();
+
+ // The first screen is the top screen
+ this.terminal = new TerminalImpl("hbtop");
+ currentScreenView = new TopScreenView(this, terminal, initialRefreshDelay, admin,
+ initialMode);
+ }
+
+ @Override
+ public void close() throws IOException {
+ try {
+ admin.close();
+ } finally {
+ try {
+ connection.close();
+ } finally {
+ terminal.close();
+ }
+ }
+ }
+
+ public void run() {
+ currentScreenView.init();
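+ // Event loop: dispatch key presses as they arrive, fire the timer when it expires, and
+ // sleep briefly when there is nothing to do.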
+ while (true) {
+ try {
+ KeyPress keyPress = terminal.pollKeyPress();
+
+ ScreenView nextScreenView;
+ if (keyPress != null) {
+ // Dispatch the key press to the current screen
+ nextScreenView = currentScreenView.handleKeyPress(keyPress);
+ } else {
+ if (timerTimestamp != null) {
+ long now = System.currentTimeMillis();
+ if (timerTimestamp <= now) {
+ // Dispatch the timer to the current screen
+ timerTimestamp = null;
+ nextScreenView = currentScreenView.handleTimer();
+ } else {
+ if (timerTimestamp - now < SLEEP_TIMEOUT_MILLISECONDS) {
+ TimeUnit.MILLISECONDS.sleep(timerTimestamp - now);
+ } else {
+ TimeUnit.MILLISECONDS.sleep(SLEEP_TIMEOUT_MILLISECONDS);
+ }
+ continue;
+ }
+ } else {
+ TimeUnit.MILLISECONDS.sleep(SLEEP_TIMEOUT_MILLISECONDS);
+ continue;
+ }
+ }
+
+ // If the next screen is null, then exit
+ if (nextScreenView == null) {
+ return;
+ }
+
+ // If the next screen is not the previous, then go to the next screen
+ if (nextScreenView != currentScreenView) {
+ currentScreenView = nextScreenView;
+ currentScreenView.init();
+ }
+ } catch (Exception e) {
+ LOGGER.error("Caught an exception", e);
+ }
+ }
+ }
+
+ public void setTimer(long delay) {
+ timerTimestamp = System.currentTimeMillis() + delay;
+ }
+
+ public void cancelTimer() {
+ timerTimestamp = null;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java
new file mode 100644
index 00000000000..f061bff831d
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * An interface for a screen view that handles key presses and timers.
+ */
+@InterfaceAudience.Private
+public interface ScreenView {
+ void init();
+ @Nullable ScreenView handleKeyPress(KeyPress keyPress);
+ @Nullable ScreenView handleTimer();
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java
new file mode 100644
index 00000000000..45f5fd01efb
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.field;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the field screen.
+ */
+@InterfaceAudience.Private
+public class FieldScreenPresenter {
+
+ @FunctionalInterface
+ public interface ResultListener {
+ void accept(Field sortField, List<Field> fields, EnumMap<Field, Boolean> fieldDisplayMap);
+ }
+
+ private final FieldScreenView fieldScreenView;
+ private Field sortField;
+ private final List<Field> fields;
+ private final EnumMap<Field, Boolean> fieldDisplayMap;
+ private final ResultListener resultListener;
+ private final ScreenView nextScreenView;
+
+ private final int headerMaxLength;
+ private final int descriptionMaxLength;
+
+ private int currentPosition;
+ private boolean moveMode;
+
+ public FieldScreenPresenter(FieldScreenView fieldScreenView, Field sortField, List<Field> fields,
+ EnumMap<Field, Boolean> fieldDisplayMap, ResultListener resultListener,
+ ScreenView nextScreenView) {
+ this.fieldScreenView = Objects.requireNonNull(fieldScreenView);
+ this.sortField = Objects.requireNonNull(sortField);
+ this.fields = new ArrayList<>(Objects.requireNonNull(fields));
+ this.fieldDisplayMap = new EnumMap<>(Objects.requireNonNull(fieldDisplayMap));
+ this.resultListener = Objects.requireNonNull(resultListener);
+ this.nextScreenView = Objects.requireNonNull(nextScreenView);
+
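+ // Find the current sort field's position and the widest header/description for layout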
+ int headerLength = 0;
+ int descriptionLength = 0;
+ for (int i = 0; i < fields.size(); i++) {
+ Field field = fields.get(i);
+
+ if (field == sortField) {
+ currentPosition = i;
+ }
+
+ if (headerLength < field.getHeader().length()) {
+ headerLength = field.getHeader().length();
+ }
+
+ if (descriptionLength < field.getDescription().length()) {
+ descriptionLength = field.getDescription().length();
+ }
+ }
+
+ headerMaxLength = headerLength;
+ descriptionMaxLength = descriptionLength;
+ }
+
+ public void init() {
+ fieldScreenView.hideCursor();
+ fieldScreenView.clearTerminal();
+ fieldScreenView.showFieldScreen(sortField.getHeader(), fields, fieldDisplayMap,
+ currentPosition, headerMaxLength, descriptionMaxLength, moveMode);
+ fieldScreenView.refreshTerminal();
+ }
+
+ public void arrowUp() {
+ if (currentPosition > 0) {
+ currentPosition -= 1;
+
+ if (moveMode) {
+ Field tmp = fields.remove(currentPosition);
+ fields.add(currentPosition + 1, tmp);
+ }
+
+ showField(currentPosition);
+ showField(currentPosition + 1);
+ fieldScreenView.refreshTerminal();
+ }
+ }
+
+ public void arrowDown() {
+ if (currentPosition < fields.size() - 1) {
+ currentPosition += 1;
+
+ if (moveMode) {
+ Field tmp = fields.remove(currentPosition - 1);
+ fields.add(currentPosition, tmp);
+ }
+
+ showField(currentPosition);
+ showField(currentPosition - 1);
+ fieldScreenView.refreshTerminal();
+ }
+ }
+
+ public void pageUp() {
+ if (currentPosition > 0 && !moveMode) {
+ int previousPosition = currentPosition;
+ currentPosition = 0;
+ showField(previousPosition);
+ showField(currentPosition);
+ fieldScreenView.refreshTerminal();
+ }
+ }
+
+ public void pageDown() {
+ if (currentPosition < fields.size() - 1 && !moveMode) {
+ int previousPosition = currentPosition;
+ currentPosition = fields.size() - 1;
+ showField(previousPosition);
+ showField(currentPosition);
+ fieldScreenView.refreshTerminal();
+ }
+ }
+
+ public void turnOnMoveMode() {
+ moveMode = true;
+ showField(currentPosition);
+ fieldScreenView.refreshTerminal();
+ }
+
+ public void turnOffMoveMode() {
+ moveMode = false;
+ showField(currentPosition);
+ fieldScreenView.refreshTerminal();
+ }
+
+ public void switchFieldDisplay() {
+ if (!moveMode) {
+ Field field = fields.get(currentPosition);
+ fieldDisplayMap.put(field, !fieldDisplayMap.get(field));
+ showField(currentPosition);
+ fieldScreenView.refreshTerminal();
+ }
+ }
+
+ private void showField(int pos) {
+ Field field = fields.get(pos);
+ fieldScreenView.showField(pos, field, fieldDisplayMap.get(field), pos == currentPosition,
+ headerMaxLength, descriptionMaxLength, moveMode);
+ }
+
+ public void setSortField() {
+ if (!moveMode) {
+ Field newSortField = fields.get(currentPosition);
+ if (newSortField != this.sortField) {
+ this.sortField = newSortField;
+ fieldScreenView.showScreenDescription(sortField.getHeader());
+ fieldScreenView.refreshTerminal();
+ }
+ }
+ }
+
+ public ScreenView transitionToNextScreen() {
+ resultListener.accept(sortField, fields, fieldDisplayMap);
+ return nextScreenView;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java
new file mode 100644
index 00000000000..0d9c6b98fec
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.field;
+
+import java.util.EnumMap;
+import java.util.List;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The screen where we can change the displayed fields, the sort key and the order of the fields.
+ */
+@InterfaceAudience.Private
+public class FieldScreenView extends AbstractScreenView {
+
+ private static final int SCREEN_DESCRIPTION_START_ROW = 0;
+ private static final int FIELD_START_ROW = 5;
+
+ private final FieldScreenPresenter fieldScreenPresenter;
+
+ public FieldScreenView(Screen screen, Terminal terminal, Field sortField, List<Field> fields,
+ EnumMap<Field, Boolean> fieldDisplayMap, FieldScreenPresenter.ResultListener resultListener,
+ ScreenView nextScreenView) {
+ super(screen, terminal);
+ this.fieldScreenPresenter = new FieldScreenPresenter(this, sortField, fields, fieldDisplayMap,
+ resultListener, nextScreenView);
+ }
+
+ @Override
+ public void init() {
+ fieldScreenPresenter.init();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ switch (keyPress.getType()) {
+ case Escape:
+ return fieldScreenPresenter.transitionToNextScreen();
+
+ case ArrowUp:
+ fieldScreenPresenter.arrowUp();
+ return this;
+
+ case ArrowDown:
+ fieldScreenPresenter.arrowDown();
+ return this;
+
+ case PageUp:
+ case Home:
+ fieldScreenPresenter.pageUp();
+ return this;
+
+ case PageDown:
+ case End:
+ fieldScreenPresenter.pageDown();
+ return this;
+
+ case ArrowRight:
+ fieldScreenPresenter.turnOnMoveMode();
+ return this;
+
+ case ArrowLeft:
+ case Enter:
+ fieldScreenPresenter.turnOffMoveMode();
+ return this;
+ }
+
+ if (keyPress.getType() != KeyPress.Type.Character) {
+ return this;
+ }
+
+ assert keyPress.getCharacter() != null;
+ switch (keyPress.getCharacter()) {
+ case 'd':
+ case ' ':
+ fieldScreenPresenter.switchFieldDisplay();
+ break;
+
+ case 's':
+ fieldScreenPresenter.setSortField();
+ break;
+
+ case 'q':
+ return fieldScreenPresenter.transitionToNextScreen();
+ }
+
+ return this;
+ }
+
+ public void showFieldScreen(String sortFieldHeader, List<Field> fields,
+ EnumMap<Field, Boolean> fieldDisplayMap, int currentPosition, int headerMaxLength,
+ int descriptionMaxLength, boolean moveMode) {
+ showScreenDescription(sortFieldHeader);
+
+ for (int i = 0; i < fields.size(); i++) {
+ Field field = fields.get(i);
+ showField(i, field, fieldDisplayMap.get(field), i == currentPosition, headerMaxLength,
+ descriptionMaxLength, moveMode);
+ }
+ }
+
+ public void showScreenDescription(String sortKeyHeader) {
+ TerminalPrinter printer = getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW);
+ printer.startBold().print("Fields Management").stopBold().endOfLine();
+ printer.print("Current Sort Field: ").startBold().print(sortKeyHeader).stopBold().endOfLine();
+ printer.print("Navigate with up/down, Right selects for move then or Left commits,")
+ .endOfLine();
+ printer.print("'d' or toggles display, 's' sets sort. Use 'q' or to end!")
+ .endOfLine();
+ }
+
+ public void showField(int pos, Field field, boolean display, boolean selected,
+ int fieldHeaderMaxLength, int fieldDescriptionMaxLength, boolean moveMode) {
+
+ String fieldHeader = String.format("%-" + fieldHeaderMaxLength + "s", field.getHeader());
+ String fieldDescription = String.format("%-" + fieldDescriptionMaxLength + "s",
+ field.getDescription());
+
+ int row = FIELD_START_ROW + pos;
+ TerminalPrinter printer = getTerminalPrinter(row);
+ if (selected) {
+ String prefix = display ? "* " : " ";
+ if (moveMode) {
+ printer.print(prefix);
+
+ if (display) {
+ printer.startBold();
+ }
+
+ printer.startHighlight()
+ .printFormat("%s = %s", fieldHeader, fieldDescription).stopHighlight();
+
+ if (display) {
+ printer.stopBold();
+ }
+
+ printer.endOfLine();
+ } else {
+ printer.print(prefix);
+
+ if (display) {
+ printer.startBold();
+ }
+
+ printer.startHighlight().print(fieldHeader).stopHighlight()
+ .printFormat(" = %s", fieldDescription);
+
+ if (display) {
+ printer.stopBold();
+ }
+
+ printer.endOfLine();
+ }
+ } else {
+ if (display) {
+ printer.print("* ").startBold().printFormat("%s = %s", fieldHeader, fieldDescription)
+ .stopBold().endOfLine();
+ } else {
+ printer.printFormat(" %s = %s", fieldHeader, fieldDescription).endOfLine();
+ }
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java
new file mode 100644
index 00000000000..5002ab8f6c1
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.help;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a description of a command that we can execute in the top screen.
+ */
+@InterfaceAudience.Private
+public class CommandDescription {
+
+ private final List<String> keys;
+ private final String description;
+
+ public CommandDescription(String key, String description) {
+ this(Collections.singletonList(Objects.requireNonNull(key)), description);
+ }
+
+ public CommandDescription(List<String> keys, String description) {
+ this.keys = Collections.unmodifiableList(new ArrayList<>(Objects.requireNonNull(keys)));
+ this.description = Objects.requireNonNull(description);
+ }
+
+ public List<String> getKeys() {
+ return keys;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java
new file mode 100644
index 00000000000..f170fc57fde
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.help;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the help screen.
+ */
+@InterfaceAudience.Private
+public class HelpScreenPresenter {
+
+ private static final CommandDescription[] COMMAND_DESCRIPTIONS = new CommandDescription[] {
+ new CommandDescription("f", "Add/Remove/Order/Sort the fields"),
+ new CommandDescription("R", "Toggle the sort order (ascending/descending)"),
+ new CommandDescription("m", "Select mode"),
+ new CommandDescription("o", "Add a filter with ignoring case"),
+ new CommandDescription("O", "Add a filter with case sensitive"),
+ new CommandDescription("^o", "Show the current filters"),
+ new CommandDescription("=", "Clear the current filters"),
+ new CommandDescription("i", "Drill down"),
+ new CommandDescription(
+ Arrays.asList("up", "down", "left", "right", "pageUp", "pageDown", "home", "end"),
+ "Scroll the metrics"),
+ new CommandDescription("d", "Change the refresh delay"),
+ new CommandDescription("X", "Adjust the field length"),
+ new CommandDescription("", "Refresh the display"),
+ new CommandDescription("h", "Display this screen"),
+ new CommandDescription(Arrays.asList("q", ""), "Quit")
+ };
+
+ private final HelpScreenView helpScreenView;
+ private final long refreshDelay;
+ private final ScreenView nextScreenView;
+
+ public HelpScreenPresenter(HelpScreenView helpScreenView, long refreshDelay,
+ ScreenView nextScreenView) {
+ this.helpScreenView = Objects.requireNonNull(helpScreenView);
+ this.refreshDelay = refreshDelay;
+ this.nextScreenView = Objects.requireNonNull(nextScreenView);
+ }
+
+ public void init() {
+ helpScreenView.hideCursor();
+ helpScreenView.clearTerminal();
+ helpScreenView.showHelpScreen(refreshDelay, COMMAND_DESCRIPTIONS);
+ helpScreenView.refreshTerminal();
+ }
+
+ public ScreenView transitionToNextScreen() {
+ return nextScreenView;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java
new file mode 100644
index 00000000000..ccdc15737d1
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.help;
+
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The help screen.
+ */
+@InterfaceAudience.Private
+public class HelpScreenView extends AbstractScreenView {
+
+ private static final int SCREEN_DESCRIPTION_START_ROW = 0;
+ private static final int COMMAND_DESCRIPTION_START_ROW = 3;
+
+ private final HelpScreenPresenter helpScreenPresenter;
+
+ public HelpScreenView(Screen screen, Terminal terminal, long refreshDelay,
+ ScreenView nextScreenView) {
+ super(screen, terminal);
+ this.helpScreenPresenter = new HelpScreenPresenter(this, refreshDelay, nextScreenView);
+ }
+
+ @Override
+ public void init() {
+ helpScreenPresenter.init();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ return helpScreenPresenter.transitionToNextScreen();
+ }
+
+ public void showHelpScreen(long refreshDelay, CommandDescription[] commandDescriptions) {
+ showScreenDescription(refreshDelay);
+
+ TerminalPrinter printer = getTerminalPrinter(COMMAND_DESCRIPTION_START_ROW);
+ for (CommandDescription commandDescription : commandDescriptions) {
+ showCommandDescription(printer, commandDescription);
+ }
+
+ printer.endOfLine();
+ printer.print("Press any key to continue").endOfLine();
+ }
+
+ private void showScreenDescription(long refreshDelay) {
+ TerminalPrinter printer = getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW);
+ printer.startBold().print("Help for Interactive Commands").stopBold().endOfLine();
+ printer.print("Refresh delay: ").startBold()
+ .print((double) refreshDelay / 1000).stopBold().endOfLine();
+ }
+
+ private void showCommandDescription(TerminalPrinter terminalPrinter,
+ CommandDescription commandDescription) {
+ terminalPrinter.print(" ");
+ boolean first = true;
+ for (String key : commandDescription.getKeys()) {
+ if (first) {
+ first = false;
+ } else {
+ terminalPrinter.print(",");
+ }
+ terminalPrinter.startBold().print(key).stopBold();
+ }
+
+ terminalPrinter.printFormat(": %s", commandDescription.getDescription()).endOfLine();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java
new file mode 100644
index 00000000000..8cd9879b0ed
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.mode;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.function.Consumer;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the mode screen.
+ */
+@InterfaceAudience.Private
+public class ModeScreenPresenter {
+
+ private final ModeScreenView modeScreenView;
+ private final Mode currentMode;
+ private final Consumer<Mode> resultListener;
+ private final ScreenView nextScreenView;
+
+ private final int modeHeaderMaxLength;
+ private final int modeDescriptionMaxLength;
+ private final List<Mode> modes = Arrays.asList(Mode.values());
+
+ private int currentPosition;
+
+ public ModeScreenPresenter(ModeScreenView modeScreenView, Mode currentMode,
+ Consumer<Mode> resultListener, ScreenView nextScreenView) {
+ this.modeScreenView = Objects.requireNonNull(modeScreenView);
+ this.currentMode = Objects.requireNonNull(currentMode);
+ this.resultListener = Objects.requireNonNull(resultListener);
+ this.nextScreenView = Objects.requireNonNull(nextScreenView);
+
+ int modeHeaderLength = 0;
+ int modeDescriptionLength = 0;
+ for (int i = 0; i < modes.size(); i++) {
+ Mode mode = modes.get(i);
+ if (mode == currentMode) {
+ currentPosition = i;
+ }
+
+ if (modeHeaderLength < mode.getHeader().length()) {
+ modeHeaderLength = mode.getHeader().length();
+ }
+
+ if (modeDescriptionLength < mode.getDescription().length()) {
+ modeDescriptionLength = mode.getDescription().length();
+ }
+ }
+
+ modeHeaderMaxLength = modeHeaderLength;
+ modeDescriptionMaxLength = modeDescriptionLength;
+ }
+
+ public void init() {
+ modeScreenView.hideCursor();
+ modeScreenView.clearTerminal();
+ modeScreenView.showModeScreen(currentMode, modes, currentPosition, modeHeaderMaxLength,
+ modeDescriptionMaxLength);
+ modeScreenView.refreshTerminal();
+ }
+
+ public void arrowUp() {
+ if (currentPosition > 0) {
+ currentPosition -= 1;
+ showMode(currentPosition);
+ showMode(currentPosition + 1);
+ modeScreenView.refreshTerminal();
+ }
+ }
+
+ public void arrowDown() {
+ if (currentPosition < modes.size() - 1) {
+ currentPosition += 1;
+ showMode(currentPosition);
+ showMode(currentPosition - 1);
+ modeScreenView.refreshTerminal();
+ }
+ }
+
+ public void pageUp() {
+ if (currentPosition > 0) {
+ int previousPosition = currentPosition;
+ currentPosition = 0;
+ showMode(previousPosition);
+ showMode(currentPosition);
+ modeScreenView.refreshTerminal();
+ }
+ }
+
+ public void pageDown() {
+ if (currentPosition < modes.size() - 1) {
+ int previousPosition = currentPosition;
+ currentPosition = modes.size() - 1;
+ showMode(previousPosition);
+ showMode(currentPosition);
+ modeScreenView.refreshTerminal();
+ }
+ }
+
+ private void showMode(int pos) {
+ modeScreenView.showMode(pos, modes.get(pos), pos == currentPosition, modeHeaderMaxLength,
+ modeDescriptionMaxLength);
+ }
+
+ public ScreenView transitionToNextScreen(boolean changeMode) {
+ Mode selectedMode = modes.get(currentPosition);
+ if (changeMode && currentMode != selectedMode) {
+ resultListener.accept(selectedMode);
+ }
+ return nextScreenView;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java
new file mode 100644
index 00000000000..f3be127a954
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.mode;
+
+import java.util.List;
+import java.util.function.Consumer;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The screen where we can choose the {@link Mode} in the top screen.
+ */
+@InterfaceAudience.Private
+public class ModeScreenView extends AbstractScreenView {
+
+ private static final int SCREEN_DESCRIPTION_START_ROW = 0;
+ private static final int MODE_START_ROW = 4;
+
+ private final ModeScreenPresenter modeScreenPresenter;
+
+ public ModeScreenView(Screen screen, Terminal terminal, Mode currentMode,
+ Consumer<Mode> resultListener, ScreenView nextScreenView) {
+ super(screen, terminal);
+ this.modeScreenPresenter = new ModeScreenPresenter(this, currentMode, resultListener,
+ nextScreenView);
+ }
+
+ @Override
+ public void init() {
+ modeScreenPresenter.init();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ switch (keyPress.getType()) {
+ case Escape:
+ return modeScreenPresenter.transitionToNextScreen(false);
+
+ case Enter:
+ return modeScreenPresenter.transitionToNextScreen(true);
+
+ case ArrowUp:
+ modeScreenPresenter.arrowUp();
+ return this;
+
+ case ArrowDown:
+ modeScreenPresenter.arrowDown();
+ return this;
+
+ case PageUp:
+ case Home:
+ modeScreenPresenter.pageUp();
+ return this;
+
+ case PageDown:
+ case End:
+ modeScreenPresenter.pageDown();
+ return this;
+ }
+
+ if (keyPress.getType() != KeyPress.Type.Character) {
+ return this;
+ }
+
+ assert keyPress.getCharacter() != null;
+ switch (keyPress.getCharacter()) {
+ case 'q':
+ return modeScreenPresenter.transitionToNextScreen(false);
+ }
+
+ return this;
+ }
+
+ public void showModeScreen(Mode currentMode, List<Mode> modes, int currentPosition,
+ int modeHeaderMaxLength, int modeDescriptionMaxLength) {
+ showScreenDescription(currentMode);
+
+ for (int i = 0; i < modes.size(); i++) {
+ showMode(i, modes.get(i), i == currentPosition,
+ modeHeaderMaxLength, modeDescriptionMaxLength);
+ }
+ }
+
+ private void showScreenDescription(Mode currentMode) {
+ TerminalPrinter printer = getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW);
+ printer.startBold().print("Mode Management").stopBold().endOfLine();
+ printer.print("Current mode: ")
+ .startBold().print(currentMode.getHeader()).stopBold().endOfLine();
+ printer.print("Select mode followed by ").endOfLine();
+ }
+
+ public void showMode(int pos, Mode mode, boolean selected, int modeHeaderMaxLength,
+ int modeDescriptionMaxLength) {
+
+ String modeHeader = String.format("%-" + modeHeaderMaxLength + "s", mode.getHeader());
+ String modeDescription = String.format("%-" + modeDescriptionMaxLength + "s",
+ mode.getDescription());
+
+ int row = MODE_START_ROW + pos;
+ TerminalPrinter printer = getTerminalPrinter(row);
+ if (selected) {
+ printer.startHighlight().print(modeHeader).stopHighlight()
+ .printFormat(" = %s", modeDescription).endOfLine();
+ } else {
+ printer.printFormat("%s = %s", modeHeader, modeDescription).endOfLine();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java
new file mode 100644
index 00000000000..6c6bf1c1b21
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the filter display mode.
+ */
+@InterfaceAudience.Private
+public class FilterDisplayModeScreenPresenter {
+
+ private final FilterDisplayModeScreenView filterDisplayModeScreenView;
+ private final List<RecordFilter> filters;
+ private final ScreenView nextScreenView;
+
+ public FilterDisplayModeScreenPresenter(FilterDisplayModeScreenView filterDisplayModeScreenView,
+ List<RecordFilter> filters, ScreenView nextScreenView) {
+ this.filterDisplayModeScreenView = Objects.requireNonNull(filterDisplayModeScreenView);
+ this.filters = Collections.unmodifiableList(new ArrayList<>(Objects.requireNonNull(filters)));
+ this.nextScreenView = Objects.requireNonNull(nextScreenView);
+ }
+
+ public void init() {
+ filterDisplayModeScreenView.showFilters(filters);
+ filterDisplayModeScreenView.refreshTerminal();
+ }
+
+ public ScreenView returnToNextScreen() {
+ return nextScreenView;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java
new file mode 100644
index 00000000000..c186d73ca36
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The filter display mode in the top screen.
+ *
+ * Exit if Enter key is pressed.
+ */
+@InterfaceAudience.Private
+public class FilterDisplayModeScreenView extends AbstractScreenView {
+
+ private final int row;
+ private final FilterDisplayModeScreenPresenter filterDisplayModeScreenPresenter;
+
+ public FilterDisplayModeScreenView(Screen screen, Terminal terminal, int row,
+ List<RecordFilter> filters, ScreenView nextScreenView) {
+ super(screen, terminal);
+ this.row = row;
+ this.filterDisplayModeScreenPresenter =
+ new FilterDisplayModeScreenPresenter(this, filters, nextScreenView);
+ }
+
+ @Override
+ public void init() {
+ filterDisplayModeScreenPresenter.init();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ switch (keyPress.getType()) {
+ case Enter:
+ return filterDisplayModeScreenPresenter.returnToNextScreen();
+ }
+ return this;
+ }
+
+ public void showFilters(List<RecordFilter> filters) {
+ String filtersString = "none";
+ if (!filters.isEmpty()) {
+ filtersString = String.join(" + ",
+ filters.stream().map(f -> String.format("'%s'", f)).collect(Collectors.toList()));
+ }
+
+ getTerminalPrinter(row).startBold().print("<Enter> to resume, filters: " + filtersString)
+ .stopBold().endOfLine();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java
new file mode 100644
index 00000000000..df672e9695d
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents headers for the metrics in the top screen.
+ */
+@InterfaceAudience.Private
+public class Header {
+ private final Field field;
+ private final int length;
+
+ public Header(Field field, int length) {
+ this.field = Objects.requireNonNull(field);
+ this.length = length;
+ }
+
+ public String format() {
+ return "%" + (field.isLeftJustify() ? "-" : "") + length + "s";
+ }
+
+ public Field getField() {
+ return field;
+ }
+
+ public int getLength() {
+ return length;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java
new file mode 100644
index 00000000000..8ab858b995f
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.function.Function;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the input mode.
+ */
+@InterfaceAudience.Private
+public class InputModeScreenPresenter {
+ private final InputModeScreenView inputModeScreenView;
+ private final String message;
+ private final List<String> histories;
+ private final Function<String, ScreenView> resultListener;
+
+ private StringBuilder inputString = new StringBuilder();
+ private int cursorPosition;
+ private int historyPosition = -1;
+
+ public InputModeScreenPresenter(InputModeScreenView inputModeScreenView, String message,
+ @Nullable List<String> histories, Function<String, ScreenView> resultListener) {
+ this.inputModeScreenView = Objects.requireNonNull(inputModeScreenView);
+ this.message = Objects.requireNonNull(message);
+
+ if (histories != null) {
+ this.histories = Collections.unmodifiableList(new ArrayList<>(histories));
+ } else {
+ this.histories = Collections.emptyList();
+ }
+
+ this.resultListener = Objects.requireNonNull(resultListener);
+ }
+
+ public void init() {
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public ScreenView returnToNextScreen() {
+ inputModeScreenView.hideCursor();
+ String result = inputString.toString();
+
+ return resultListener.apply(result);
+ }
+
+ public void character(Character character) {
+ inputString.insert(cursorPosition, character);
+ cursorPosition += 1;
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void backspace() {
+ if (cursorPosition == 0) {
+ return;
+ }
+
+ inputString.deleteCharAt(cursorPosition - 1);
+ cursorPosition -= 1;
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void delete() {
+ if (inputString.length() == 0 || cursorPosition > inputString.length() - 1) {
+ return;
+ }
+
+ inputString.deleteCharAt(cursorPosition);
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void arrowLeft() {
+ if (cursorPosition == 0) {
+ return;
+ }
+
+ cursorPosition -= 1;
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void arrowRight() {
+ if (cursorPosition > inputString.length() - 1) {
+ return;
+ }
+
+ cursorPosition += 1;
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void home() {
+ cursorPosition = 0;
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void end() {
+ cursorPosition = inputString.length();
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
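+ /*
+ * Navigates backwards through the input histories. A historyPosition of -1 means that no
+ * history entry is currently selected.
+ */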
+ public void arrowUp() {
+ if (historyPosition == 0 || histories.isEmpty()) {
+ return;
+ }
+
+ if (historyPosition == -1) {
+ historyPosition = histories.size() - 1;
+ } else {
+ historyPosition -= 1;
+ }
+
+ inputString = new StringBuilder(histories.get(historyPosition));
+
+ cursorPosition = inputString.length();
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+
+ public void arrowDown() {
+ if (historyPosition == -1 || histories.isEmpty()) {
+ return;
+ }
+
+ if (historyPosition == histories.size() - 1) {
+ historyPosition = -1;
+ inputString = new StringBuilder();
+ } else {
+ historyPosition += 1;
+ inputString = new StringBuilder(histories.get(historyPosition));
+ }
+
+ cursorPosition = inputString.length();
+ inputModeScreenView.showInput(message, inputString.toString(), cursorPosition);
+ inputModeScreenView.refreshTerminal();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java
new file mode 100644
index 00000000000..ab64a8ade22
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.List;
+import java.util.function.Function;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The input mode in the top screen.
+ */
+@InterfaceAudience.Private
+public class InputModeScreenView extends AbstractScreenView {
+
+ private final int row;
+ private final InputModeScreenPresenter inputModeScreenPresenter;
+
+ public InputModeScreenView(Screen screen, Terminal terminal, int row, String message,
+ List<String> histories, Function<String, ScreenView> resultListener) {
+ super(screen, terminal);
+ this.row = row;
+ this.inputModeScreenPresenter = new InputModeScreenPresenter(this, message, histories,
+ resultListener);
+ }
+
+ @Override
+ public void init() {
+ inputModeScreenPresenter.init();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+
+ switch (keyPress.getType()) {
+ case Enter:
+ return inputModeScreenPresenter.returnToNextScreen();
+
+ case Character:
+ inputModeScreenPresenter.character(keyPress.getCharacter());
+ break;
+
+ case Backspace:
+ inputModeScreenPresenter.backspace();
+ break;
+
+ case Delete:
+ inputModeScreenPresenter.delete();
+ break;
+
+ case ArrowLeft:
+ inputModeScreenPresenter.arrowLeft();
+ break;
+
+ case ArrowRight:
+ inputModeScreenPresenter.arrowRight();
+ break;
+
+ case Home:
+ inputModeScreenPresenter.home();
+ break;
+
+ case End:
+ inputModeScreenPresenter.end();
+ break;
+
+ case ArrowUp:
+ inputModeScreenPresenter.arrowUp();
+ break;
+
+ case ArrowDown:
+ inputModeScreenPresenter.arrowDown();
+ break;
+
+ default:
+ break;
+ }
+ return this;
+ }
+
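+ // Prints the prompt and the current input, then places the cursor at the editing position.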
+ public void showInput(String message, String inputString, int cursorPosition) {
+ getTerminalPrinter(row).startBold().print(message).stopBold().print(" ").print(inputString)
+ .endOfLine();
+ setCursorPosition(message.length() + 1 + cursorPosition, row);
+ refreshTerminal();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java
new file mode 100644
index 00000000000..174a15a4843
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the message mode.
+ *
+ * Exit after 2 seconds or if any key is pressed.
+ */
+@InterfaceAudience.Private
+public class MessageModeScreenPresenter {
+
+ private final MessageModeScreenView messageModeScreenView;
+ private final String message;
+ private final ScreenView nextScreenView;
+
+ public MessageModeScreenPresenter(MessageModeScreenView messageModeScreenView, String message,
+ ScreenView nextScreenView) {
+ this.messageModeScreenView = Objects.requireNonNull(messageModeScreenView);
+ this.message = Objects.requireNonNull(message);
+ this.nextScreenView = Objects.requireNonNull(nextScreenView);
+ }
+
+ public void init() {
+ messageModeScreenView.showMessage(message);
+ messageModeScreenView.refreshTerminal();
+ }
+
+ public ScreenView returnToNextScreen() {
+ return nextScreenView;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java
new file mode 100644
index 00000000000..0dfa388fad0
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The message mode in the top screen.
+ */
+@InterfaceAudience.Private
+public class MessageModeScreenView extends AbstractScreenView {
+
+ private final int row;
+ private final MessageModeScreenPresenter messageModeScreenPresenter;
+
+ public MessageModeScreenView(Screen screen, Terminal terminal, int row, String message,
+ ScreenView nextScreenView) {
+ super(screen, terminal);
+ this.row = row;
+ this.messageModeScreenPresenter =
+ new MessageModeScreenPresenter(this, message, nextScreenView);
+ }
+
+ @Override
+ public void init() {
+ messageModeScreenPresenter.init();
+ setTimer(2000);
+ }
+
+ @Override
+ public ScreenView handleTimer() {
+ return messageModeScreenPresenter.returnToNextScreen();
+ }
+
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ cancelTimer();
+ return messageModeScreenPresenter.returnToNextScreen();
+ }
+
+ public void showMessage(String message) {
+ getTerminalPrinter(row).startHighlight().print(" ").print(message).print(" ").stopHighlight()
+ .endOfLine();
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java
new file mode 100644
index 00000000000..b95e6f480e6
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Utility class for paging through the metrics.
+ */
+@InterfaceAudience.Private
+public class Paging {
+ private int currentPosition;
+ private int pageStartPosition;
+ private int pageEndPosition;
+
+ private int pageSize;
+ private int recordsSize;
+
+ public void init() {
+ currentPosition = 0;
+ pageStartPosition = 0;
+ pageEndPosition = Math.min(pageSize, recordsSize);
+ }
+
+ public void updatePageSize(int pageSize) {
+ this.pageSize = pageSize;
+
+ if (pageSize == 0) {
+ pageStartPosition = 0;
+ pageEndPosition = 0;
+ } else {
+ pageEndPosition = pageStartPosition + pageSize;
+ keepConsistent();
+ }
+ }
+
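+ /*
+ * Adjusts the page window when the number of records changes. The cursor is reset when the
+ * record list was empty before or becomes empty now.
+ */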
+ public void updateRecordsSize(int recordsSize) {
+ if (this.recordsSize == 0) {
+ currentPosition = 0;
+ pageStartPosition = 0;
+ pageEndPosition = Math.min(pageSize, recordsSize);
+ this.recordsSize = recordsSize;
+ } else if (recordsSize == 0) {
+ currentPosition = 0;
+ pageStartPosition = 0;
+ pageEndPosition = 0;
+ this.recordsSize = recordsSize;
+ } else {
+ this.recordsSize = recordsSize;
+ if (pageSize > 0) {
+ pageEndPosition = pageStartPosition + pageSize;
+ keepConsistent();
+ }
+ }
+ }
+
+ public void arrowUp() {
+ if (currentPosition > 0) {
+ currentPosition -= 1;
+ if (pageSize > 0) {
+ keepConsistent();
+ }
+ }
+ }
+
+ public void arrowDown() {
+ if (currentPosition < recordsSize - 1) {
+ currentPosition += 1;
+ if (pageSize > 0) {
+ keepConsistent();
+ }
+ }
+ }
+
+ public void pageUp() {
+ if (pageSize > 0 && currentPosition > 0) {
+ currentPosition -= pageSize;
+ if (currentPosition < 0) {
+ currentPosition = 0;
+ }
+ keepConsistent();
+ }
+ }
+
+ public void pageDown() {
+ if (pageSize > 0 && currentPosition < recordsSize - 1) {
+
+ currentPosition = currentPosition + pageSize;
+ if (currentPosition >= recordsSize) {
+ currentPosition = recordsSize - 1;
+ }
+
+ pageStartPosition = currentPosition;
+ pageEndPosition = pageStartPosition + pageSize;
+ keepConsistent();
+ }
+ }
+
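+ /*
+ * Keeps the cursor inside the page window and clamps the window to the bounds of the records.
+ */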
+ private void keepConsistent() {
+ if (currentPosition < pageStartPosition) {
+ pageStartPosition = currentPosition;
+ pageEndPosition = pageStartPosition + pageSize;
+ } else if (currentPosition > recordsSize - 1) {
+ currentPosition = recordsSize - 1;
+ pageEndPosition = recordsSize;
+ pageStartPosition = pageEndPosition - pageSize;
+ } else if (currentPosition > pageEndPosition - 1) {
+ pageEndPosition = currentPosition + 1;
+ pageStartPosition = pageEndPosition - pageSize;
+ }
+
+ if (pageStartPosition < 0) {
+ pageStartPosition = 0;
+ }
+
+ if (pageEndPosition > recordsSize) {
+ pageEndPosition = recordsSize;
+ pageStartPosition = pageEndPosition - pageSize;
+ if (pageStartPosition < 0) {
+ pageStartPosition = 0;
+ }
+ }
+ }
+
+ public int getCurrentPosition() {
+ return currentPosition;
+ }
+
+ public int getPageStartPosition() {
+ return pageStartPosition;
+ }
+
+ public int getPageEndPosition() {
+ return pageEndPosition;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java
new file mode 100644
index 00000000000..03598f66fb4
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents the summary of the metrics.
+ */
+@InterfaceAudience.Private
+public class Summary {
+ private final String currentTime;
+ private final String version;
+ private final String clusterId;
+ private final int servers;
+ private final int liveServers;
+ private final int deadServers;
+ private final int regionCount;
+ private final int ritCount;
+ private final double averageLoad;
+ private final long aggregateRequestPerSecond;
+
+ public Summary(String currentTime, String version, String clusterId, int servers,
+ int liveServers, int deadServers, int regionCount, int ritCount, double averageLoad,
+ long aggregateRequestPerSecond) {
+ this.currentTime = Objects.requireNonNull(currentTime);
+ this.version = Objects.requireNonNull(version);
+ this.clusterId = Objects.requireNonNull(clusterId);
+ this.servers = servers;
+ this.liveServers = liveServers;
+ this.deadServers = deadServers;
+ this.regionCount = regionCount;
+ this.ritCount = ritCount;
+ this.averageLoad = averageLoad;
+ this.aggregateRequestPerSecond = aggregateRequestPerSecond;
+ }
+
+ public String getCurrentTime() {
+ return currentTime;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public String getClusterId() {
+ return clusterId;
+ }
+
+ public int getServers() {
+ return servers;
+ }
+
+ public int getLiveServers() {
+ return liveServers;
+ }
+
+ public int getDeadServers() {
+ return deadServers;
+ }
+
+ public int getRegionCount() {
+ return regionCount;
+ }
+
+ public int getRitCount() {
+ return ritCount;
+ }
+
+ public double getAverageLoad() {
+ return averageLoad;
+ }
+
+ public long getAggregateRequestPerSecond() {
+ return aggregateRequestPerSecond;
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java
new file mode 100644
index 00000000000..42e81e156fb
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+import org.apache.commons.lang3.time.DateFormatUtils;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.field.FieldValue;
+import org.apache.hadoop.hbase.hbtop.mode.DrillDownInfo;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * The data and business logic for the top screen.
+ */
+@InterfaceAudience.Private
+public class TopScreenModel {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TopScreenModel.class);
+
+ private final Admin admin;
+
+ private Mode currentMode;
+ private Field currentSortField;
+ private List<FieldInfo> fieldInfos;
+ private List<Field> fields;
+
+ private Summary summary;
+ private List<Record> records;
+
+ private final List<RecordFilter> filters = new ArrayList<>();
+ private final List<String> filterHistories = new ArrayList<>();
+
+ private boolean ascendingSort;
+
+ public TopScreenModel(Admin admin, Mode initialMode) {
+ this.admin = Objects.requireNonNull(admin);
+ switchMode(Objects.requireNonNull(initialMode), null, false);
+ }
+
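+ /*
+ * Switches to the given mode. The current sort field and sort order are kept only when
+ * requested and the new mode also contains the current sort field; otherwise the mode's
+ * defaults are used.
+ */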
+ public void switchMode(Mode nextMode, List<RecordFilter> initialFilters,
+ boolean keepSortFieldAndSortOrderIfPossible) {
+
+ currentMode = nextMode;
+ fieldInfos = Collections.unmodifiableList(new ArrayList<>(currentMode.getFieldInfos()));
+ fields = Collections.unmodifiableList(currentMode.getFieldInfos().stream()
+ .map(FieldInfo::getField).collect(Collectors.toList()));
+
+ if (keepSortFieldAndSortOrderIfPossible) {
+ boolean match = fields.stream().anyMatch(f -> f == currentSortField);
+ if (!match) {
+ currentSortField = nextMode.getDefaultSortField();
+ ascendingSort = false;
+ }
+ } else {
+ currentSortField = nextMode.getDefaultSortField();
+ ascendingSort = false;
+ }
+
+ clearFilters();
+ if (initialFilters != null) {
+ filters.addAll(initialFilters);
+ }
+ }
+
+ public void setSortFieldAndFields(Field sortField, List<Field> fields) {
+ this.currentSortField = sortField;
+ this.fields = Collections.unmodifiableList(new ArrayList<>(fields));
+ }
+
+ /*
+ * HBTop only calls this from a single thread, and if that ever changes, this needs
+ * synchronization
+ */
+ public void refreshMetricsData() {
+ ClusterMetrics clusterMetrics;
+ try {
+ clusterMetrics = admin.getClusterMetrics();
+ } catch (Exception e) {
+ LOGGER.error("Unable to get cluster metrics", e);
+ return;
+ }
+
+ refreshSummary(clusterMetrics);
+ refreshRecords(clusterMetrics);
+ }
+
+ private void refreshSummary(ClusterMetrics clusterMetrics) {
+ String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT
+ .format(System.currentTimeMillis());
+ String version = clusterMetrics.getHBaseVersion();
+ String clusterId = clusterMetrics.getClusterId();
+ int liveServers = clusterMetrics.getLiveServerMetrics().size();
+ int deadServers = clusterMetrics.getDeadServerNames().size();
+ int regionCount = clusterMetrics.getRegionCount();
+ int ritCount = clusterMetrics.getRegionStatesInTransition().size();
+ double averageLoad = clusterMetrics.getAverageLoad();
+ long aggregateRequestPerSecond = clusterMetrics.getLiveServerMetrics().entrySet().stream()
+ .mapToLong(e -> e.getValue().getRequestCountPerSecond()).sum();
+
+ summary = new Summary(currentTime, version, clusterId, liveServers + deadServers,
+ liveServers, deadServers, regionCount, ritCount, averageLoad, aggregateRequestPerSecond);
+ }
+
+ private void refreshRecords(ClusterMetrics clusterMetrics) {
+ List<Record> records = currentMode.getRecords(clusterMetrics);
+
+ // Filter and sort
+ records = records.stream()
+ .filter(r -> filters.stream().allMatch(f -> f.execute(r)))
+ .sorted((recordLeft, recordRight) -> {
+ FieldValue left = recordLeft.get(currentSortField);
+ FieldValue right = recordRight.get(currentSortField);
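+ // Descending by default; the sign flips when ascending sort order is selected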
+ return (ascendingSort ? 1 : -1) * left.compareTo(right);
+ }).collect(Collectors.toList());
+
+ this.records = Collections.unmodifiableList(records);
+ }
+
+ public void switchSortOrder() {
+ ascendingSort = !ascendingSort;
+ }
+
+ public boolean addFilter(String filterString, boolean ignoreCase) {
+ RecordFilter filter = RecordFilter.parse(filterString, fields, ignoreCase);
+ if (filter == null) {
+ return false;
+ }
+
+ filters.add(filter);
+ filterHistories.add(filterString);
+ return true;
+ }
+
+ public void clearFilters() {
+ filters.clear();
+ }
+
+ public boolean drillDown(Record selectedRecord) {
+ DrillDownInfo drillDownInfo = currentMode.drillDown(selectedRecord);
+ if (drillDownInfo == null) {
+ return false;
+ }
+ switchMode(drillDownInfo.getNextMode(), drillDownInfo.getInitialFilters(), true);
+ return true;
+ }
+
+ public Mode getCurrentMode() {
+ return currentMode;
+ }
+
+ public Field getCurrentSortField() {
+ return currentSortField;
+ }
+
+ public List<FieldInfo> getFieldInfos() {
+ return fieldInfos;
+ }
+
+ public List<Field> getFields() {
+ return fields;
+ }
+
+ public Summary getSummary() {
+ return summary;
+ }
+
+ public List<Record> getRecords() {
+ return records;
+ }
+
+ public List<RecordFilter> getFilters() {
+ return Collections.unmodifiableList(filters);
+ }
+
+ public List<String> getFilterHistories() {
+ return Collections.unmodifiableList(filterHistories);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java
new file mode 100644
index 00000000000..d435f5c1dfc
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java
@@ -0,0 +1,330 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.field.FieldScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.help.HelpScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.mode.ModeScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the top screen.
+ */
+@InterfaceAudience.Private
+public class TopScreenPresenter {
+ private final TopScreenView topScreenView;
+ private final AtomicLong refreshDelay;
+ private long lastRefreshTimestamp;
+
+ private final AtomicBoolean adjustFieldLength = new AtomicBoolean(true);
+ private final TopScreenModel topScreenModel;
+ private int terminalLength;
+ private int horizontalScroll;
+ private final Paging paging = new Paging();
+
+ private final EnumMap<Field, Boolean> fieldDisplayMap = new EnumMap<>(Field.class);
+ private final EnumMap<Field, Integer> fieldLengthMap = new EnumMap<>(Field.class);
+
+ public TopScreenPresenter(TopScreenView topScreenView, long initialRefreshDelay,
+ TopScreenModel topScreenModel) {
+ this.topScreenView = Objects.requireNonNull(topScreenView);
+ this.refreshDelay = new AtomicLong(initialRefreshDelay);
+ this.topScreenModel = Objects.requireNonNull(topScreenModel);
+
+ initFieldDisplayMapAndFieldLengthMap();
+ }
+
+ public void init() {
+ terminalLength = topScreenView.getTerminalSize().getColumns();
+ paging.updatePageSize(topScreenView.getPageSize());
+ topScreenView.hideCursor();
+ }
+
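+ /*
+ * Refreshes the screen and returns the delay (in milliseconds) until the next scheduled
+ * refresh. Unless forced, the refresh is skipped while the configured delay has not elapsed.
+ */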
+ public long refresh(boolean force) {
+ if (!force) {
+ long delay = System.currentTimeMillis() - lastRefreshTimestamp;
+ if (delay < refreshDelay.get()) {
+ return refreshDelay.get() - delay;
+ }
+ }
+
+ TerminalSize newTerminalSize = topScreenView.doResizeIfNecessary();
+ if (newTerminalSize != null) {
+ terminalLength = newTerminalSize.getColumns();
+ paging.updatePageSize(topScreenView.getPageSize());
+ topScreenView.clearTerminal();
+ }
+
+ topScreenModel.refreshMetricsData();
+ paging.updateRecordsSize(topScreenModel.getRecords().size());
+
+ adjustFieldLengthIfNeeded();
+
+ topScreenView.showTopScreen(topScreenModel.getSummary(), getDisplayedHeaders(),
+ getDisplayedRecords(), getSelectedRecord());
+
+ topScreenView.refreshTerminal();
+
+ lastRefreshTimestamp = System.currentTimeMillis();
+ return refreshDelay.get();
+ }
+
+ public void adjustFieldLength() {
+ adjustFieldLength.set(true);
+ refresh(true);
+ }
+
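+ // Recomputes the width of auto-adjusted fields from the longest value currently shown.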
+ private void adjustFieldLengthIfNeeded() {
+ if (adjustFieldLength.get()) {
+ adjustFieldLength.set(false);
+
+ for (Field f : topScreenModel.getFields()) {
+ if (f.isAutoAdjust()) {
+ int maxLength = topScreenModel.getRecords().stream()
+ .map(r -> r.get(f).asString().length())
+ .max(Integer::compareTo).orElse(0);
+ fieldLengthMap.put(f, Math.max(maxLength, f.getHeader().length()));
+ }
+ }
+ }
+ }
+
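+ /*
+ * Builds the headers that fit into the terminal width, starting from the current horizontal
+ * scroll position.
+ */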
+ private List<Header> getDisplayedHeaders() {
+ List<Field> displayFields =
+ topScreenModel.getFields().stream()
+ .filter(fieldDisplayMap::get).collect(Collectors.toList());
+
+ if (displayFields.isEmpty()) {
+ horizontalScroll = 0;
+ } else if (horizontalScroll > displayFields.size() - 1) {
+ horizontalScroll = displayFields.size() - 1;
+ }
+
+ List<Header> ret = new ArrayList<>();
+
+ int length = 0;
+ for (int i = horizontalScroll; i < displayFields.size(); i++) {
+ Field field = displayFields.get(i);
+ int fieldLength = fieldLengthMap.get(field);
+
+ length += fieldLength + 1;
+ if (length > terminalLength) {
+ break;
+ }
+ ret.add(new Header(field, fieldLength));
+ }
+
+ return ret;
+ }
+
+ private List<Record> getDisplayedRecords() {
+ List<Record> ret = new ArrayList<>();
+ for (int i = paging.getPageStartPosition(); i < paging.getPageEndPosition(); i++) {
+ ret.add(topScreenModel.getRecords().get(i));
+ }
+ return ret;
+ }
+
+ private Record getSelectedRecord() {
+ if (topScreenModel.getRecords().isEmpty()) {
+ return null;
+ }
+ return topScreenModel.getRecords().get(paging.getCurrentPosition());
+ }
+
+ public void arrowUp() {
+ paging.arrowUp();
+ refresh(true);
+ }
+
+ public void arrowDown() {
+ paging.arrowDown();
+ refresh(true);
+ }
+
+ public void pageUp() {
+ paging.pageUp();
+ refresh(true);
+ }
+
+ public void pageDown() {
+ paging.pageDown();
+ refresh(true);
+ }
+
+ public void arrowLeft() {
+ if (horizontalScroll > 0) {
+ horizontalScroll -= 1;
+ }
+ refresh(true);
+ }
+
+ public void arrowRight() {
+ if (horizontalScroll < getHeaderSize() - 1) {
+ horizontalScroll += 1;
+ }
+ refresh(true);
+ }
+
+ public void home() {
+ if (horizontalScroll > 0) {
+ horizontalScroll = 0;
+ }
+ refresh(true);
+ }
+
+ public void end() {
+ int headerSize = getHeaderSize();
+ horizontalScroll = headerSize == 0 ? 0 : headerSize - 1;
+ refresh(true);
+ }
+
+ private int getHeaderSize() {
+ return (int) topScreenModel.getFields().stream()
+ .filter(fieldDisplayMap::get).count();
+ }
+
+ public void switchSortOrder() {
+ topScreenModel.switchSortOrder();
+ refresh(true);
+ }
+
+ public ScreenView transitionToHelpScreen(Screen screen, Terminal terminal) {
+ return new HelpScreenView(screen, terminal, refreshDelay.get(), topScreenView);
+ }
+
+ public ScreenView transitionToModeScreen(Screen screen, Terminal terminal) {
+ return new ModeScreenView(screen, terminal, topScreenModel.getCurrentMode(), this::switchMode,
+ topScreenView);
+ }
+
+ public ScreenView transitionToFieldScreen(Screen screen, Terminal terminal) {
+ return new FieldScreenView(screen, terminal,
+ topScreenModel.getCurrentSortField(), topScreenModel.getFields(),
+ fieldDisplayMap,
+ (sortKey, fields, fieldDisplayMap) -> {
+ topScreenModel.setSortFieldAndFields(sortKey, fields);
+ this.fieldDisplayMap.clear();
+ this.fieldDisplayMap.putAll(fieldDisplayMap);
+ }, topScreenView);
+ }
+
+ private void switchMode(Mode nextMode) {
+ topScreenModel.switchMode(nextMode, null, false);
+ reset();
+ }
+
+ public void drillDown() {
+ Record selectedRecord = getSelectedRecord();
+ if (selectedRecord == null) {
+ return;
+ }
+ if (topScreenModel.drillDown(selectedRecord)) {
+ reset();
+ refresh(true);
+ }
+ }
+
+ private void reset() {
+ initFieldDisplayMapAndFieldLengthMap();
+ adjustFieldLength.set(true);
+ paging.init();
+ horizontalScroll = 0;
+ topScreenView.clearTerminal();
+ }
+
+ private void initFieldDisplayMapAndFieldLengthMap() {
+ fieldDisplayMap.clear();
+ fieldLengthMap.clear();
+ for (FieldInfo fieldInfo : topScreenModel.getFieldInfos()) {
+ fieldDisplayMap.put(fieldInfo.getField(), fieldInfo.isDisplayByDefault());
+ fieldLengthMap.put(fieldInfo.getField(), fieldInfo.getDefaultLength());
+ }
+ }
+
+ public ScreenView goToMessageMode(Screen screen, Terminal terminal, int row, String message) {
+ return new MessageModeScreenView(screen, terminal, row, message, topScreenView);
+ }
+
+ public ScreenView goToInputModeForRefreshDelay(Screen screen, Terminal terminal, int row) {
+ return new InputModeScreenView(screen, terminal, row,
+ "Change refresh delay from " + (double) refreshDelay.get() / 1000 + " to", null,
+ (inputString) -> {
+ if (inputString.isEmpty()) {
+ return topScreenView;
+ }
+
+ double delay;
+ try {
+ delay = Double.valueOf(inputString);
+ } catch (NumberFormatException e) {
+ return goToMessageMode(screen, terminal, row, "Unacceptable floating point");
+ }
+
+ refreshDelay.set((long) (delay * 1000));
+ return topScreenView;
+ });
+ }
+
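+ /*
+ * A filter expression has the form [!]FIELD?VALUE, e.g. something like NAMESPACE==default
+ * (illustrative only; see RecordFilter.parse for the exact operators and syntax).
+ */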
+ public ScreenView goToInputModeForFilter(Screen screen, Terminal terminal, int row,
+ boolean ignoreCase) {
+ return new InputModeScreenView(screen, terminal, row,
+ "add filter #" + (topScreenModel.getFilters().size() + 1) +
+ " (" + (ignoreCase ? "ignoring case" : "case sensitive") + ") as: [!]FLD?VAL",
+ topScreenModel.getFilterHistories(),
+ (inputString) -> {
+ if (inputString.isEmpty()) {
+ return topScreenView;
+ }
+
+ if (!topScreenModel.addFilter(inputString, ignoreCase)) {
+ return goToMessageMode(screen, terminal, row, "Unacceptable filter expression");
+ }
+
+ paging.init();
+ return topScreenView;
+ });
+ }
+
+ public void clearFilters() {
+ topScreenModel.clearFilters();
+ paging.init();
+ refresh(true);
+ }
+
+ public ScreenView goToFilterDisplayMode(Screen screen, Terminal terminal, int row) {
+ return new FilterDisplayModeScreenView(screen, terminal, row, topScreenModel.getFilters(),
+ topScreenView);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java
new file mode 100644
index 00000000000..0f52b378202
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java
@@ -0,0 +1,300 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The screen that provides a dynamic real-time view for the HBase metrics.
+ *
+ * This shows the metric {@link Summary} and the metric {@link Record}s. The summary and the
+ * metrics are updated periodically (3 seconds by default).
+ */
+@InterfaceAudience.Private
+public class TopScreenView extends AbstractScreenView {
+
+ private static final int SUMMARY_START_ROW = 0;
+ private static final int SUMMARY_ROW_NUM = 7;
+ private static final int MESSAGE_ROW = 7;
+ private static final int RECORD_HEADER_ROW = 8;
+ private static final int RECORD_START_ROW = 9;
+
+ private final TopScreenPresenter topScreenPresenter;
+ private int pageSize;
+
+ public TopScreenView(Screen screen, Terminal terminal, long initialRefreshDelay, Admin admin,
+ Mode initialMode) {
+ super(screen, terminal);
+ this.topScreenPresenter = new TopScreenPresenter(this, initialRefreshDelay,
+ new TopScreenModel(admin, initialMode));
+ }
+
+ @Override
+ public void init() {
+ topScreenPresenter.init();
+ long delay = topScreenPresenter.refresh(true);
+ setTimer(delay);
+ }
+
+ @Override
+ public ScreenView handleTimer() {
+ long delay = topScreenPresenter.refresh(false);
+ setTimer(delay);
+ return this;
+ }
+
+ @Nullable
+ @Override
+ public ScreenView handleKeyPress(KeyPress keyPress) {
+ switch (keyPress.getType()) {
+ case Enter:
+ topScreenPresenter.refresh(true);
+ return this;
+
+ case ArrowUp:
+ topScreenPresenter.arrowUp();
+ return this;
+
+ case ArrowDown:
+ topScreenPresenter.arrowDown();
+ return this;
+
+ case ArrowLeft:
+ topScreenPresenter.arrowLeft();
+ return this;
+
+ case ArrowRight:
+ topScreenPresenter.arrowRight();
+ return this;
+
+ case PageUp:
+ topScreenPresenter.pageUp();
+ return this;
+
+ case PageDown:
+ topScreenPresenter.pageDown();
+ return this;
+
+ case Home:
+ topScreenPresenter.home();
+ return this;
+
+ case End:
+ topScreenPresenter.end();
+ return this;
+
+ case Escape:
+ return null;
+ }
+
+ if (keyPress.getType() != KeyPress.Type.Character) {
+ return unknownCommandMessage();
+ }
+
+ assert keyPress.getCharacter() != null;
+ switch (keyPress.getCharacter()) {
+ case 'R':
+ topScreenPresenter.switchSortOrder();
+ break;
+
+ case 'f':
+ cancelTimer();
+ return topScreenPresenter.transitionToFieldScreen(getScreen(), getTerminal());
+
+ case 'm':
+ cancelTimer();
+ return topScreenPresenter.transitionToModeScreen(getScreen(), getTerminal());
+
+ case 'h':
+ cancelTimer();
+ return topScreenPresenter.transitionToHelpScreen(getScreen(), getTerminal());
+
+ case 'd':
+ cancelTimer();
+ return topScreenPresenter.goToInputModeForRefreshDelay(getScreen(), getTerminal(),
+ MESSAGE_ROW);
+
+ case 'o':
+ cancelTimer();
+ if (keyPress.isCtrl()) {
+ return topScreenPresenter.goToFilterDisplayMode(getScreen(), getTerminal(), MESSAGE_ROW);
+ }
+ return topScreenPresenter.goToInputModeForFilter(getScreen(), getTerminal(), MESSAGE_ROW,
+ true);
+
+ case 'O':
+ cancelTimer();
+ return topScreenPresenter.goToInputModeForFilter(getScreen(), getTerminal(), MESSAGE_ROW,
+ false);
+
+ case '=':
+ topScreenPresenter.clearFilters();
+ break;
+
+ case 'X':
+ topScreenPresenter.adjustFieldLength();
+ break;
+
+ case 'i':
+ topScreenPresenter.drillDown();
+ break;
+
+ case 'q':
+ return null;
+
+ default:
+ return unknownCommandMessage();
+ }
+ return this;
+ }
+
+ public TerminalSize getTerminalSize() {
+ TerminalSize terminalSize = super.getTerminalSize();
+ updatePageSize(terminalSize);
+ return terminalSize;
+ }
+
+ public TerminalSize doResizeIfNecessary() {
+ TerminalSize terminalSize = super.doResizeIfNecessary();
+ if (terminalSize == null) {
+ return null;
+ }
+ updatePageSize(terminalSize);
+ return terminalSize;
+ }
+
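+ // The page size is the terminal height minus the summary area and the message and header rows.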
+ private void updatePageSize(TerminalSize terminalSize) {
+ pageSize = terminalSize.getRows() - SUMMARY_ROW_NUM - 2;
+ if (pageSize < 0) {
+ pageSize = 0;
+ }
+ }
+
+ public int getPageSize() {
+ return pageSize;
+ }
+
+ public void showTopScreen(Summary summary, List<Header> headers, List<Record> records,
+ Record selectedRecord) {
+ showSummary(summary);
+ clearMessage();
+ showHeaders(headers);
+ showRecords(headers, records, selectedRecord);
+ }
+
+ private void showSummary(Summary summary) {
+ TerminalPrinter printer = getTerminalPrinter(SUMMARY_START_ROW);
+ printer.print(String.format("HBase hbtop - %s", summary.getCurrentTime())).endOfLine();
+ printer.print(String.format("Version: %s", summary.getVersion())).endOfLine();
+ printer.print(String.format("Cluster ID: %s", summary.getClusterId())).endOfLine();
+ printer.print("RegionServer(s): ")
+ .startBold().print(Integer.toString(summary.getServers())).stopBold()
+ .print(" total, ")
+ .startBold().print(Integer.toString(summary.getLiveServers())).stopBold()
+ .print(" live, ")
+ .startBold().print(Integer.toString(summary.getDeadServers())).stopBold()
+ .print(" dead").endOfLine();
+ printer.print("RegionCount: ")
+ .startBold().print(Integer.toString(summary.getRegionCount())).stopBold()
+ .print(" total, ")
+ .startBold().print(Integer.toString(summary.getRitCount())).stopBold()
+ .print(" rit").endOfLine();
+ printer.print("Average Cluster Load: ")
+ .startBold().print(String.format("%.2f", summary.getAverageLoad())).stopBold().endOfLine();
+ printer.print("Aggregate Request/s: ")
+ .startBold().print(Long.toString(summary.getAggregateRequestPerSecond())).stopBold()
+ .endOfLine();
+ }
+
+ private void showRecords(List<Header> headers, List<Record> records, Record selectedRecord) {
+ TerminalPrinter printer = getTerminalPrinter(RECORD_START_ROW);
+ List<String> buf = new ArrayList<>(headers.size());
+ for (int i = 0; i < pageSize; i++) {
+ if (i < records.size()) {
+ Record record = records.get(i);
+ buf.clear();
+ for (Header header : headers) {
+ String value = "";
+ if (record.containsKey(header.getField())) {
+ value = record.get(header.getField()).asString();
+ }
+
+ buf.add(limitLineLength(String.format(header.format(), value), header.getLength()));
+ }
+
+ String recordString = String.join(" ", buf);
+ if (!recordString.isEmpty()) {
+ recordString += " ";
+ }
+
+ if (record == selectedRecord) {
+ printer.startHighlight().print(recordString).stopHighlight().endOfLine();
+ } else {
+ printer.print(recordString).endOfLine();
+ }
+ } else {
+ printer.endOfLine();
+ }
+ }
+ }
+
+ private void showHeaders(List<Header> headers) {
+ String header = headers.stream()
+ .map(h -> String.format(h.format(), h.getField().getHeader()))
+ .collect(Collectors.joining(" "));
+
+ if (!header.isEmpty()) {
+ header += " ";
+ }
+
+ getTerminalPrinter(RECORD_HEADER_ROW).startHighlight().print(header).stopHighlight()
+ .endOfLine();
+ }
+
+ private String limitLineLength(String line, int length) {
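+ // Truncate values that are wider than the column and mark the truncation with a trailing '+'.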
+ if (line.length() > length) {
+ return line.substring(0, length - 1) + "+";
+ }
+ return line;
+ }
+
+ private void clearMessage() {
+ getTerminalPrinter(MESSAGE_ROW).print("").endOfLine();
+ }
+
+ private ScreenView unknownCommandMessage() {
+ cancelTimer();
+ return topScreenPresenter.goToMessageMode(getScreen(), getTerminal(), MESSAGE_ROW,
+ "Unknown command - try 'h' for help");
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Attributes.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Attributes.java
new file mode 100644
index 00000000000..9322aaa8157
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Attributes.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The attributes of text in the terminal.
+ */
+@InterfaceAudience.Private
+public class Attributes {
+ private boolean bold;
+ private boolean blink;
+ private boolean reverse;
+ private boolean underline;
+ private Color foregroundColor;
+ private Color backgroundColor;
+
+ public Attributes() {
+ reset();
+ }
+
+ public Attributes(Attributes attributes) {
+ set(attributes);
+ }
+
+ public boolean isBold() {
+ return bold;
+ }
+
+ public void setBold(boolean bold) {
+ this.bold = bold;
+ }
+
+ public boolean isBlink() {
+ return blink;
+ }
+
+ public void setBlink(boolean blink) {
+ this.blink = blink;
+ }
+
+ public boolean isReverse() {
+ return reverse;
+ }
+
+ public void setReverse(boolean reverse) {
+ this.reverse = reverse;
+ }
+
+ public boolean isUnderline() {
+ return underline;
+ }
+
+ public void setUnderline(boolean underline) {
+ this.underline = underline;
+ }
+
+ public Color getForegroundColor() {
+ return foregroundColor;
+ }
+
+ public void setForegroundColor(Color foregroundColor) {
+ this.foregroundColor = foregroundColor;
+ }
+
+ public Color getBackgroundColor() {
+ return backgroundColor;
+ }
+
+ public void setBackgroundColor(Color backgroundColor) {
+ this.backgroundColor = backgroundColor;
+ }
+
+ public void reset() {
+ bold = false;
+ blink = false;
+ reverse = false;
+ underline = false;
+ foregroundColor = Color.WHITE;
+ backgroundColor = Color.BLACK;
+ }
+
+ public void set(Attributes attributes) {
+ bold = attributes.bold;
+ blink = attributes.blink;
+ reverse = attributes.reverse;
+ underline = attributes.underline;
+ foregroundColor = attributes.foregroundColor;
+ backgroundColor = attributes.backgroundColor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof Attributes)) {
+ return false;
+ }
+ Attributes that = (Attributes) o;
+ return bold == that.bold && blink == that.blink && reverse == that.reverse
+ && underline == that.underline && foregroundColor == that.foregroundColor
+ && backgroundColor == that.backgroundColor;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(bold, blink, reverse, underline, foregroundColor, backgroundColor);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Color.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Color.java
new file mode 100644
index 00000000000..843a315ab71
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Color.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Terminal color definitions.
+ */
+@InterfaceAudience.Private
+public enum Color {
+ BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java
new file mode 100644
index 00000000000..775ff3d72e6
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * A 2-d position in 'terminal space'.
+ */
+@InterfaceAudience.Private
+public class CursorPosition {
+ private final int column;
+ private final int row;
+
+ public CursorPosition(int column, int row) {
+ this.column = column;
+ this.row = row;
+ }
+
+ public int getColumn() {
+ return column;
+ }
+
+ public int getRow() {
+ return row;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof CursorPosition)) {
+ return false;
+ }
+ CursorPosition that = (CursorPosition) o;
+ return column == that.column && row == that.row;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(column, row);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPress.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPress.java
new file mode 100644
index 00000000000..d0be00c5868
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPress.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents the user pressing a key on the keyboard.
+ */
+@InterfaceAudience.Private
+public class KeyPress {
+ public enum Type {
+ Character,
+ Escape,
+ Backspace,
+ ArrowLeft,
+ ArrowRight,
+ ArrowUp,
+ ArrowDown,
+ Insert,
+ Delete,
+ Home,
+ End,
+ PageUp,
+ PageDown,
+ ReverseTab,
+ Tab,
+ Enter,
+ F1,
+ F2,
+ F3,
+ F4,
+ F5,
+ F6,
+ F7,
+ F8,
+ F9,
+ F10,
+ F11,
+ F12,
+ Unknown
+ }
+
+ private final Type type;
+ private final Character character;
+ private final boolean alt;
+ private final boolean ctrl;
+ private final boolean shift;
+
+ public KeyPress(Type type, @Nullable Character character, boolean alt, boolean ctrl,
+ boolean shift) {
+ this.type = Objects.requireNonNull(type);
+ this.character = character;
+ this.alt = alt;
+ this.ctrl = ctrl;
+ this.shift = shift;
+ }
+
+ public Type getType() {
+ return type;
+ }
+
+ @Nullable
+ public Character getCharacter() {
+ return character;
+ }
+
+ public boolean isAlt() {
+ return alt;
+ }
+
+ public boolean isCtrl() {
+ return ctrl;
+ }
+
+ public boolean isShift() {
+ return shift;
+ }
+
+ @Override
+ public String toString() {
+ return "KeyPress{" +
+ "type=" + type +
+ ", character=" + escape(character) +
+ ", alt=" + alt +
+ ", ctrl=" + ctrl +
+ ", shift=" + shift +
+ '}';
+ }
+
+ private String escape(Character character) {
+ if (character == null) {
+ return "null";
+ }
+
+ switch (character) {
+ case '\n':
+ return "\\n";
+
+ case '\b':
+ return "\\b";
+
+ case '\t':
+ return "\\t";
+
+ default:
+ return character.toString();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java
new file mode 100644
index 00000000000..248bb58681f
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.io.Closeable;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The terminal interface that is an abstraction of the terminal screen.
+ */
+@InterfaceAudience.Private
+public interface Terminal extends Closeable {
+ void clear();
+ void refresh();
+ TerminalSize getSize();
+ @Nullable TerminalSize doResizeIfNecessary();
+ @Nullable KeyPress pollKeyPress();
+ CursorPosition getCursorPosition();
+ void setCursorPosition(int column, int row);
+ void hideCursor();
+ TerminalPrinter getTerminalPrinter(int startRow);
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java
new file mode 100644
index 00000000000..66fb55875b0
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The interface responsible for printing to the terminal.
+ */
+@InterfaceAudience.Private
+public interface TerminalPrinter {
+ TerminalPrinter print(String value);
+
+ default TerminalPrinter print(Object value) {
+ print(value.toString());
+ return this;
+ }
+
+ default TerminalPrinter print(char value) {
+ print(Character.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter print(short value) {
+ print(Short.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter print(int value) {
+ print(Integer.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter print(long value) {
+ print(Long.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter print(float value) {
+ print(Float.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter print(double value) {
+ print(Double.toString(value));
+ return this;
+ }
+
+ default TerminalPrinter printFormat(String format, Object... args) {
+ print(String.format(format, args));
+ return this;
+ }
+
+ TerminalPrinter startHighlight();
+
+ TerminalPrinter stopHighlight();
+
+ TerminalPrinter startBold();
+
+ TerminalPrinter stopBold();
+
+ void endOfLine();
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java
new file mode 100644
index 00000000000..f7e55dde7b5
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.Objects;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Terminal dimensions in 2-d space, measured in number of rows and columns.
+ */
+@InterfaceAudience.Private
+public class TerminalSize {
+ private final int columns;
+ private final int rows;
+
+ public TerminalSize(int columns, int rows) {
+ this.columns = columns;
+ this.rows = rows;
+ }
+
+ public int getColumns() {
+ return columns;
+ }
+
+ public int getRows() {
+ return rows;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof TerminalSize)) {
+ return false;
+ }
+ TerminalSize that = (TerminalSize) o;
+ return columns == that.columns && rows == that.rows;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(columns, rows);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/Cell.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/Cell.java
new file mode 100644
index 00000000000..de61477ce33
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/Cell.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.terminal.Attributes;
+import org.apache.hadoop.hbase.hbtop.terminal.Color;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a single text cell of the terminal.
+ */
+@InterfaceAudience.Private
+public class Cell {
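+ // Sentinel characters: UNSET_VALUE marks a cell whose on-screen state is unknown (forcing a
+ // redraw on the next flush), and END_OF_LINE marks the end of the meaningful content of a row.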
+ private static final char UNSET_VALUE = (char) 65535;
+ private static final char END_OF_LINE = '\0';
+
+ private final Attributes attributes;
+ private char ch;
+
+ public Cell() {
+ attributes = new Attributes();
+ ch = ' ';
+ }
+
+ public char getChar() {
+ return ch;
+ }
+
+ public void setChar(char ch) {
+ this.ch = ch;
+ }
+
+ public void reset() {
+ attributes.reset();
+ ch = ' ';
+ }
+
+ public void unset() {
+ attributes.reset();
+ ch = UNSET_VALUE;
+ }
+
+ public void endOfLine() {
+ attributes.reset();
+ ch = END_OF_LINE;
+ }
+
+ public boolean isEndOfLine() {
+ return ch == END_OF_LINE;
+ }
+
+ public void set(Cell cell) {
+ attributes.set(cell.attributes);
+ this.ch = cell.ch;
+ }
+
+ public Attributes getAttributes() {
+ return new Attributes(attributes);
+ }
+
+ public void setAttributes(Attributes attributes) {
+ this.attributes.set(attributes);
+ }
+
+ public boolean isBold() {
+ return attributes.isBold();
+ }
+
+ public boolean isBlink() {
+ return attributes.isBlink();
+ }
+
+ public boolean isReverse() {
+ return attributes.isReverse();
+ }
+
+ public boolean isUnderline() {
+ return attributes.isUnderline();
+ }
+
+ public Color getForegroundColor() {
+ return attributes.getForegroundColor();
+ }
+
+ public Color getBackgroundColor() {
+ return attributes.getBackgroundColor();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof Cell)) {
+ return false;
+ }
+ Cell cell = (Cell) o;
+ return ch == cell.ch && attributes.equals(cell.attributes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attributes, ch);
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/EscapeSequences.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/EscapeSequences.java
new file mode 100644
index 00000000000..52f8e374364
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/EscapeSequences.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import org.apache.hadoop.hbase.hbtop.terminal.Color;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Utility class for escape sequences.
+ */
+@InterfaceAudience.Private
+public final class EscapeSequences {
+
+ private EscapeSequences() {
+ }
+
+ public static String clearAll() {
+ return "\033[0;37;40m\033[2J";
+ }
+
+ public static String setTitle(String title) {
+ return "\033]2;" + title + "\007";
+ }
+
+ public static String cursor(boolean on) {
+ if (on) {
+ return "\033[?25h";
+ }
+ return "\033[?25l";
+ }
+
+ public static String moveCursor(int column, int row) {
+ return String.format("\033[%d;%dH", row + 1, column + 1);
+ }
+
+ public static String clearRemainingLine() {
+ return "\033[0;37;40m\033[K";
+ }
+
+ public static String color(Color foregroundColor, Color backgroundColor, boolean bold,
+ boolean reverse, boolean blink, boolean underline) {
+
+ int foregroundColorValue = getColorValue(foregroundColor, true);
+ int backgroundColorValue = getColorValue(backgroundColor, false);
+
+ StringBuilder sb = new StringBuilder();
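+ // Build an ANSI SGR sequence: 0 resets attributes, then 1=bold, 7=reverse video, 5=blink
+ // and 4=underline are appended as needed, followed by the color codes below.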
+ if (bold && reverse && blink && !underline) {
+ sb.append("\033[0;1;7;5;");
+ } else if (bold && reverse && !blink && !underline) {
+ sb.append("\033[0;1;7;");
+ } else if (!bold && reverse && blink && !underline) {
+ sb.append("\033[0;7;5;");
+ } else if (bold && !reverse && blink && !underline) {
+ sb.append("\033[0;1;5;");
+ } else if (bold && !reverse && !blink && !underline) {
+ sb.append("\033[0;1;");
+ } else if (!bold && reverse && !blink && !underline) {
+ sb.append("\033[0;7;");
+ } else if (!bold && !reverse && blink && !underline) {
+ sb.append("\033[0;5;");
+ } else if (bold && reverse && blink) {
+ sb.append("\033[0;1;7;5;4;");
+ } else if (bold && reverse) {
+ sb.append("\033[0;1;7;4;");
+ } else if (!bold && reverse && blink) {
+ sb.append("\033[0;7;5;4;");
+ } else if (bold && blink) {
+ sb.append("\033[0;1;5;4;");
+ } else if (bold) {
+ sb.append("\033[0;1;4;");
+ } else if (reverse) {
+ sb.append("\033[0;7;4;");
+ } else if (blink) {
+ sb.append("\033[0;5;4;");
+ } else if (underline) {
+ sb.append("\033[0;4;");
+ } else {
+ sb.append("\033[0;");
+ }
+ sb.append(String.format("%d;%dm", foregroundColorValue, backgroundColorValue));
+ return sb.toString();
+ }
+
+ private static int getColorValue(Color color, boolean foreground) {
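+ // ANSI SGR colors: foreground codes are 30-37, background codes are 40-47.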
+ int baseValue;
+ if (foreground) {
+ baseValue = 30;
+ } else { // background
+ baseValue = 40;
+ }
+
+ switch (color) {
+ case BLACK:
+ return baseValue;
+
+ case RED:
+ return baseValue + 1;
+
+ case GREEN:
+ return baseValue + 2;
+
+ case YELLOW:
+ return baseValue + 3;
+
+ case BLUE:
+ return baseValue + 4;
+
+ case MAGENTA:
+ return baseValue + 5;
+
+ case CYAN:
+ return baseValue + 6;
+
+ case WHITE:
+ return baseValue + 7;
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public static String normal() {
+ return "\033[0;37;40m";
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/KeyPressGenerator.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/KeyPressGenerator.java
new file mode 100644
index 00000000000..29122cbda0a
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/KeyPressGenerator.java
@@ -0,0 +1,488 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.nio.charset.StandardCharsets;
+import java.util.Queue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.util.Threads;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+
+/**
+ * This generates {@link KeyPress} objects from the given input stream and offers them to the
+ * given queue.
+ */
+@InterfaceAudience.Private
+public class KeyPressGenerator {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(KeyPressGenerator.class);
+
+ private enum ParseState {
+ START, ESCAPE, ESCAPE_SEQUENCE_PARAM1, ESCAPE_SEQUENCE_PARAM2
+ }
+
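+ // Two daemon threads cooperate here: a reader thread pulls raw characters from the input
+ // stream into inputCharacterQueue, and a generator thread parses those characters (including
+ // multi-character escape sequences) into KeyPress events offered to keyPressQueue.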
+ private final Queue<KeyPress> keyPressQueue;
+ private final BlockingQueue<Character> inputCharacterQueue = new LinkedBlockingQueue<>();
+ private final Reader input;
+ private final InputStream inputStream;
+ private final AtomicBoolean stopThreads = new AtomicBoolean();
+ private final ExecutorService executorService;
+
+ private ParseState parseState;
+ private int param1;
+ private int param2;
+
+ public KeyPressGenerator(InputStream inputStream, Queue<KeyPress> keyPressQueue) {
+ this.inputStream = inputStream;
+ input = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
+ this.keyPressQueue = keyPressQueue;
+
+ executorService = Executors.newFixedThreadPool(2, new ThreadFactoryBuilder()
+ .setNameFormat("KeyPressGenerator-%d").setDaemon(true)
+ .setUncaughtExceptionHandler(Threads.LOGGING_EXCEPTION_HANDLER).build());
+
+ initState();
+ }
+
+ public void start() {
+ executorService.submit(this::readerThread);
+ executorService.submit(this::generatorThread);
+ }
+
+ private void initState() {
+ parseState = ParseState.START;
+ param1 = 0;
+ param2 = 0;
+ }
+
+ private void readerThread() {
+ boolean done = false;
+ char[] readBuffer = new char[128];
+
+ while (!done && !stopThreads.get()) {
+ try {
+ int n = inputStream.available();
+ if (n > 0) {
+ if (readBuffer.length < n) {
+ readBuffer = new char[readBuffer.length * 2];
+ }
+
+ int rc = input.read(readBuffer, 0, readBuffer.length);
+ if (rc == -1) {
+ // EOF
+ done = true;
+ } else {
+ for (int i = 0; i < rc; i++) {
+ int ch = readBuffer[i];
+ inputCharacterQueue.offer((char) ch);
+ }
+ }
+ } else {
+ Thread.sleep(20);
+ }
+ } catch (InterruptedException ignored) {
+ } catch (IOException e) {
+ LOGGER.error("Caught an exception", e);
+ done = true;
+ }
+ }
+ }
+
+ private void generatorThread() {
+ while (!stopThreads.get()) {
+ Character ch;
+ try {
+ ch = inputCharacterQueue.poll(100, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException ignored) {
+ continue;
+ }
+
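+ // A poll timeout while in the ESCAPE state means the user pressed a bare ESC key rather than
+ // starting an escape sequence; any other partially-parsed state is reported as Unknown.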
+ if (ch == null) {
+ if (parseState == ParseState.ESCAPE) {
+ offer(new KeyPress(KeyPress.Type.Escape, null, false, false, false));
+ initState();
+ } else if (parseState != ParseState.START) {
+ offer(new KeyPress(KeyPress.Type.Unknown, null, false, false, false));
+ initState();
+ }
+ continue;
+ }
+
+ if (parseState == ParseState.START) {
+ if (ch == 0x1B) {
+ parseState = ParseState.ESCAPE;
+ continue;
+ }
+
+ switch (ch) {
+ case '\n':
+ case '\r':
+ offer(new KeyPress(KeyPress.Type.Enter, '\n', false, false, false));
+ continue;
+
+ case 0x08:
+ case 0x7F:
+ offer(new KeyPress(KeyPress.Type.Backspace, '\b', false, false, false));
+ continue;
+
+ case '\t':
+ offer(new KeyPress(KeyPress.Type.Tab, '\t', false, false, false));
+ continue;
+ }
+
+ if (ch < 32) {
+ ctrlAndCharacter(ch);
+ continue;
+ }
+
+ if (isPrintableChar(ch)) {
+ // Normal character
+ offer(new KeyPress(KeyPress.Type.Character, ch, false, false, false));
+ continue;
+ }
+
+ offer(new KeyPress(KeyPress.Type.Unknown, null, false, false, false));
+ continue;
+ }
+
+ if (parseState == ParseState.ESCAPE) {
+ if (ch == 0x1B) {
+ offer(new KeyPress(KeyPress.Type.Escape, null, false, false, false));
+ continue;
+ }
+
+ if (ch < 32 && ch != 0x08) {
+ ctrlAltAndCharacter(ch);
+ initState();
+ continue;
+ } else if (ch == 0x7F || ch == 0x08) {
+ offer(new KeyPress(KeyPress.Type.Backspace, '\b', false, false, false));
+ initState();
+ continue;
+ }
+
+ if (ch == '[' || ch == 'O') {
+ parseState = ParseState.ESCAPE_SEQUENCE_PARAM1;
+ continue;
+ }
+
+ if (isPrintableChar(ch)) {
+ // Alt and character
+ offer(new KeyPress(KeyPress.Type.Character, ch, true, false, false));
+ initState();
+ continue;
+ }
+
+ offer(new KeyPress(KeyPress.Type.Escape, null, false, false, false));
+ offer(new KeyPress(KeyPress.Type.Unknown, null, false, false, false));
+ initState();
+ continue;
+ }
+
+ escapeSequenceCharacter(ch);
+ }
+ }
+
+ private void ctrlAndCharacter(char ch) {
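+ // ASCII control codes 1-26 are Ctrl-A through Ctrl-Z; map them (and a few specials) back to
+ // the character that was pressed together with Ctrl.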
+ char ctrlCode;
+ switch (ch) {
+ case 0:
+ ctrlCode = ' ';
+ break;
+
+ case 28:
+ ctrlCode = '\\';
+ break;
+
+ case 29:
+ ctrlCode = ']';
+ break;
+
+ case 30:
+ ctrlCode = '^';
+ break;
+
+ case 31:
+ ctrlCode = '_';
+ break;
+
+ default:
+ ctrlCode = (char) ('a' - 1 + ch);
+ break;
+ }
+ offer(new KeyPress(KeyPress.Type.Character, ctrlCode, false, true, false));
+ }
+
+ private boolean isPrintableChar(char ch) {
+ if (Character.isISOControl(ch)) {
+ return false;
+ }
+ Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
+ return block != null && block != Character.UnicodeBlock.SPECIALS;
+ }
+
+ private void ctrlAltAndCharacter(char ch) {
+ char ctrlCode;
+ switch (ch) {
+ case 0:
+ ctrlCode = ' ';
+ break;
+
+ case 28:
+ ctrlCode = '\\';
+ break;
+
+ case 29:
+ ctrlCode = ']';
+ break;
+
+ case 30:
+ ctrlCode = '^';
+ break;
+
+ case 31:
+ ctrlCode = '_';
+ break;
+
+ default:
+ ctrlCode = (char) ('a' - 1 + ch);
+ break;
+ }
+ offer(new KeyPress(KeyPress.Type.Character, ctrlCode, true, true, false));
+ }
+
+ private void escapeSequenceCharacter(char ch) {
+ switch (parseState) {
+ case ESCAPE_SEQUENCE_PARAM1:
+ if (ch == ';') {
+ parseState = ParseState.ESCAPE_SEQUENCE_PARAM2;
+ } else if (Character.isDigit(ch)) {
+ param1 = param1 * 10 + Character.digit(ch, 10);
+ } else {
+ doneEscapeSequenceCharacter(ch);
+ }
+ break;
+
+ case ESCAPE_SEQUENCE_PARAM2:
+ if (Character.isDigit(ch)) {
+ param2 = param2 * 10 + Character.digit(ch, 10);
+ } else {
+ doneEscapeSequenceCharacter(ch);
+ }
+ break;
+
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ private void doneEscapeSequenceCharacter(char last) {
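+ // xterm-style escape sequences either end with a letter (arrow keys, Home/End, F1-F4, etc.)
+ // or with '~' preceded by a numeric code (Insert/Delete, PageUp/PageDown, F5-F12).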
+ boolean alt = false;
+ boolean ctrl = false;
+ boolean shift = false;
+ if (param2 != 0) {
+ alt = isAlt(param2);
+ ctrl = isCtrl(param2);
+ shift = isShift(param2);
+ }
+
+ if (last != '~') {
+ switch (last) {
+ case 'A':
+ offer(new KeyPress(KeyPress.Type.ArrowUp, null, alt, ctrl, shift));
+ break;
+
+ case 'B':
+ offer(new KeyPress(KeyPress.Type.ArrowDown, null, alt, ctrl, shift));
+ break;
+
+ case 'C':
+ offer(new KeyPress(KeyPress.Type.ArrowRight, null, alt, ctrl, shift));
+ break;
+
+ case 'D':
+ offer(new KeyPress(KeyPress.Type.ArrowLeft, null, alt, ctrl, shift));
+ break;
+
+ case 'H':
+ offer(new KeyPress(KeyPress.Type.Home, null, alt, ctrl, shift));
+ break;
+
+ case 'F':
+ offer(new KeyPress(KeyPress.Type.End, null, alt, ctrl, shift));
+ break;
+
+ case 'P':
+ offer(new KeyPress(KeyPress.Type.F1, null, alt, ctrl, shift));
+ break;
+
+ case 'Q':
+ offer(new KeyPress(KeyPress.Type.F2, null, alt, ctrl, shift));
+ break;
+
+ case 'R':
+ offer(new KeyPress(KeyPress.Type.F3, null, alt, ctrl, shift));
+ break;
+
+ case 'S':
+ offer(new KeyPress(KeyPress.Type.F4, null, alt, ctrl, shift));
+ break;
+
+ case 'Z':
+ offer(new KeyPress(KeyPress.Type.ReverseTab, null, alt, ctrl, shift));
+ break;
+
+ default:
+ offer(new KeyPress(KeyPress.Type.Unknown, null, alt, ctrl, shift));
+ break;
+ }
+ initState();
+ return;
+ }
+
+ switch (param1) {
+ case 1:
+ offer(new KeyPress(KeyPress.Type.Home, null, alt, ctrl, shift));
+ break;
+
+ case 2:
+ offer(new KeyPress(KeyPress.Type.Insert, null, alt, ctrl, shift));
+ break;
+
+ case 3:
+ offer(new KeyPress(KeyPress.Type.Delete, null, alt, ctrl, shift));
+ break;
+
+ case 4:
+ offer(new KeyPress(KeyPress.Type.End, null, alt, ctrl, shift));
+ break;
+
+ case 5:
+ offer(new KeyPress(KeyPress.Type.PageUp, null, alt, ctrl, shift));
+ break;
+
+ case 6:
+ offer(new KeyPress(KeyPress.Type.PageDown, null, alt, ctrl, shift));
+ break;
+
+ case 11:
+ offer(new KeyPress(KeyPress.Type.F1, null, alt, ctrl, shift));
+ break;
+
+ case 12:
+ offer(new KeyPress(KeyPress.Type.F2, null, alt, ctrl, shift));
+ break;
+
+ case 13:
+ offer(new KeyPress(KeyPress.Type.F3, null, alt, ctrl, shift));
+ break;
+
+ case 14:
+ offer(new KeyPress(KeyPress.Type.F4, null, alt, ctrl, shift));
+ break;
+
+ case 15:
+ offer(new KeyPress(KeyPress.Type.F5, null, alt, ctrl, shift));
+ break;
+
+ case 17:
+ offer(new KeyPress(KeyPress.Type.F6, null, alt, ctrl, shift));
+ break;
+
+ case 18:
+ offer(new KeyPress(KeyPress.Type.F7, null, alt, ctrl, shift));
+ break;
+
+ case 19:
+ offer(new KeyPress(KeyPress.Type.F8, null, alt, ctrl, shift));
+ break;
+
+ case 20:
+ offer(new KeyPress(KeyPress.Type.F9, null, alt, ctrl, shift));
+ break;
+
+ case 21:
+ offer(new KeyPress(KeyPress.Type.F10, null, alt, ctrl, shift));
+ break;
+
+ case 23:
+ offer(new KeyPress(KeyPress.Type.F11, null, alt, ctrl, shift));
+ break;
+
+ case 24:
+ offer(new KeyPress(KeyPress.Type.F12, null, alt, ctrl, shift));
+ break;
+
+ default:
+ offer(new KeyPress(KeyPress.Type.Unknown, null, false, false, false));
+ break;
+ }
+
+ initState();
+ }
+
+ private boolean isShift(int param) {
+ return (param & 1) != 0;
+ }
+
+ private boolean isAlt(int param) {
+ return (param & 2) != 0;
+ }
+
+ private boolean isCtrl(int param) {
+ return (param & 4) != 0;
+ }
+
+ private void offer(KeyPress keyPress) {
+ // Handle ctrl + c
+ if (keyPress.isCtrl() && keyPress.getType() == KeyPress.Type.Character &&
+ keyPress.getCharacter() == 'c') {
+ System.exit(0);
+ }
+
+ keyPressQueue.offer(keyPress);
+ }
+
+ public void stop() {
+ stopThreads.set(true);
+
+ executorService.shutdown();
+ try {
+ while (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
+ LOGGER.warn("Waiting for thread-pool to terminate");
+ }
+ } catch (InterruptedException e) {
+ LOGGER.warn("Interrupted while waiting for thread-pool termination", e);
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/ScreenBuffer.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/ScreenBuffer.java
new file mode 100644
index 00000000000..8752c5fe689
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/ScreenBuffer.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.clearRemainingLine;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.color;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.cursor;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.moveCursor;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.normal;
+
+import java.io.PrintWriter;
+import org.apache.hadoop.hbase.hbtop.terminal.Attributes;
+import org.apache.hadoop.hbase.hbtop.terminal.CursorPosition;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents a buffer of the terminal screen for double-buffering.
+ */
+@InterfaceAudience.Private
+public class ScreenBuffer {
+ private int columns;
+ private int rows;
+
+ private Cell[][] buffer;
+ private Cell[][] physical;
+
+ private boolean cursorVisible;
+ private int cursorColumn;
+ private int cursorRow;
+
+ public void reallocate(int columns, int rows) {
+ buffer = new Cell[columns][rows];
+ physical = new Cell[columns][rows];
+
+ for (int row = 0; row < rows; row++) {
+ for (int column = 0; column < columns; column++) {
+ buffer[column][row] = new Cell();
+
+ physical[column][row] = new Cell();
+ physical[column][row].unset();
+ }
+ }
+
+ this.columns = columns;
+ this.rows = rows;
+ }
+
+ public void clear() {
+ for (int row = 0; row < rows; row++) {
+ for (int col = 0; col < columns; col++) {
+ buffer[col][row].reset();
+ }
+ }
+ }
+
+ public void flush(PrintWriter output) {
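+ // Double-buffered rendering: only cells that differ from the 'physical' (already displayed)
+ // state are re-emitted, so a refresh writes the minimum number of escape sequences.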
+ StringBuilder sb = new StringBuilder();
+
+ sb.append(normal());
+ Attributes attributes = new Attributes();
+ for (int row = 0; row < rows; row++) {
+ flushRow(row, sb, attributes);
+ }
+
+ if (cursorVisible && cursorRow >= 0 && cursorColumn >= 0 && cursorRow < rows &&
+ cursorColumn < columns) {
+ sb.append(cursor(true));
+ sb.append(moveCursor(cursorColumn, cursorRow));
+ } else {
+ sb.append(cursor(false));
+ }
+
+ output.write(sb.toString());
+ output.flush();
+ }
+
+ private void flushRow(int row, StringBuilder sb, Attributes lastAttributes) {
+ int lastColumn = -1;
+ for (int column = 0; column < columns; column++) {
+ Cell cell = buffer[column][row];
+ Cell pCell = physical[column][row];
+
+ if (!cell.equals(pCell)) {
+ if (lastColumn != column - 1 || lastColumn == -1) {
+ sb.append(moveCursor(column, row));
+ }
+
+ if (cell.isEndOfLine()) {
+ for (int i = column; i < columns; i++) {
+ physical[i][row].set(buffer[i][row]);
+ }
+
+ sb.append(clearRemainingLine());
+ lastAttributes.reset();
+ return;
+ }
+
+ if (!cell.getAttributes().equals(lastAttributes)) {
+ sb.append(color(cell.getForegroundColor(), cell.getBackgroundColor(), cell.isBold(),
+ cell.isReverse(), cell.isBlink(), cell.isUnderline()));
+ }
+
+ sb.append(cell.getChar());
+
+ lastColumn = column;
+ lastAttributes.set(cell.getAttributes());
+
+ physical[column][row].set(cell);
+ }
+ }
+ }
+
+ public CursorPosition getCursorPosition() {
+ return new CursorPosition(cursorColumn, cursorRow);
+ }
+
+ public void setCursorPosition(int column, int row) {
+ cursorVisible = true;
+ cursorColumn = column;
+ cursorRow = row;
+ }
+
+ public void hideCursor() {
+ cursorVisible = false;
+ }
+
+ public void putString(int column, int row, String string, Attributes attributes) {
+ int i = column;
+ for (int j = 0; j < string.length(); j++) {
+ char ch = string.charAt(j);
+ putChar(i, row, ch, attributes);
+ i += 1;
+ if (i == columns) {
+ break;
+ }
+ }
+ }
+
+ public void putChar(int column, int row, char ch, Attributes attributes) {
+ if (column >= 0 && column < columns && row >= 0 && row < rows) {
+ buffer[column][row].setAttributes(attributes);
+ buffer[column][row].setChar(ch);
+ }
+ }
+
+ public void endOfLine(int column, int row) {
+ if (column >= 0 && column < columns && row >= 0 && row < rows) {
+ buffer[column][row].endOfLine();
+ for (int i = column + 1; i < columns; i++) {
+ buffer[i][row].reset();
+ }
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalImpl.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalImpl.java
new file mode 100644
index 00000000000..ed7152335dc
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalImpl.java
@@ -0,0 +1,227 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.clearAll;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.cursor;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.moveCursor;
+import static org.apache.hadoop.hbase.hbtop.terminal.impl.EscapeSequences.normal;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.UncheckedIOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Queue;
+import java.util.StringTokenizer;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import org.apache.hadoop.hbase.hbtop.terminal.CursorPosition;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyPress;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * The implementation of the {@link Terminal} interface.
+ */
+@InterfaceAudience.Private
+public class TerminalImpl implements Terminal {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TerminalImpl.class);
+
+ private TerminalSize cachedTerminalSize;
+
+ private final PrintWriter output;
+
+ private final ScreenBuffer screenBuffer;
+
+ private final Queue<KeyPress> keyPressQueue;
+ private final KeyPressGenerator keyPressGenerator;
+
+ public TerminalImpl() {
+ this(null);
+ }
+
+ public TerminalImpl(@Nullable String title) {
+ output = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
+ sttyRaw();
+
+ if (title != null) {
+ setTitle(title);
+ }
+
+ screenBuffer = new ScreenBuffer();
+
+ cachedTerminalSize = queryTerminalSize();
+ updateTerminalSize(cachedTerminalSize.getColumns(), cachedTerminalSize.getRows());
+
+ keyPressQueue = new ConcurrentLinkedQueue<>();
+ keyPressGenerator = new KeyPressGenerator(System.in, keyPressQueue);
+ keyPressGenerator.start();
+
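+ // On JVM exit, restore the terminal: re-home and show the cursor, reset attributes, clear
+ // the screen and put the tty back into cooked mode.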
+ Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+ output.printf("%s%s%s%s", moveCursor(0, 0), cursor(true), normal(), clearAll());
+ output.flush();
+ sttyCooked();
+ }));
+
+ // Clear the terminal
+ output.write(clearAll());
+ output.flush();
+ }
+
+ private void setTitle(String title) {
+ output.write(EscapeSequences.setTitle(title));
+ output.flush();
+ }
+
+ private void updateTerminalSize(int columns, int rows) {
+ screenBuffer.reallocate(columns, rows);
+ }
+
+ @Override
+ public void clear() {
+ screenBuffer.clear();
+ }
+
+ @Override
+ public void refresh() {
+ screenBuffer.flush(output);
+ }
+
+ @Override
+ public TerminalSize getSize() {
+ return cachedTerminalSize;
+ }
+
+ @Nullable
+ @Override
+ public TerminalSize doResizeIfNecessary() {
+ TerminalSize currentTerminalSize = queryTerminalSize();
+ if (!currentTerminalSize.equals(cachedTerminalSize)) {
+ cachedTerminalSize = currentTerminalSize;
+ updateTerminalSize(cachedTerminalSize.getColumns(), cachedTerminalSize.getRows());
+ return cachedTerminalSize;
+ }
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public KeyPress pollKeyPress() {
+ return keyPressQueue.poll();
+ }
+
+ @Override
+ public CursorPosition getCursorPosition() {
+ return screenBuffer.getCursorPosition();
+ }
+
+ @Override
+ public void setCursorPosition(int column, int row) {
+ screenBuffer.setCursorPosition(column, row);
+ }
+
+ @Override
+ public void hideCursor() {
+ screenBuffer.hideCursor();
+ }
+
+ @Override
+ public TerminalPrinter getTerminalPrinter(int startRow) {
+ return new TerminalPrinterImpl(screenBuffer, startRow);
+ }
+
+ @Override
+ public void close() {
+ keyPressGenerator.stop();
+ }
+
+ private TerminalSize queryTerminalSize() {
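+ // 'stty size' prints "<rows> <columns>" for the controlling terminal.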
+ String sizeString = doStty("size");
+
+ int rows = 0;
+ int columns = 0;
+
+ StringTokenizer tokenizer = new StringTokenizer(sizeString);
+ int rc = Integer.parseInt(tokenizer.nextToken());
+ if (rc > 0) {
+ rows = rc;
+ }
+
+ rc = Integer.parseInt(tokenizer.nextToken());
+ if (rc > 0) {
+ columns = rc;
+ }
+ return new TerminalSize(columns, rows);
+ }
+
+ private void sttyRaw() {
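+ // Switch the tty to a raw-like mode: disable echo, canonical line buffering and signal
+ // generation so that single keystrokes (including ESC sequences) are delivered immediately.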
+ doStty("-ignbrk -brkint -parmrk -istrip -inlcr -igncr -icrnl -ixon -opost " +
+ "-echo -echonl -icanon -isig -iexten -parenb cs8 min 1");
+ }
+
+ private void sttyCooked() {
+ doStty("sane cooked");
+ }
+
+ private String doStty(String sttyOptionsString) {
+ String[] cmd = {"/bin/sh", "-c", "stty " + sttyOptionsString + " < /dev/tty"};
+
+ try {
+ Process process = Runtime.getRuntime().exec(cmd);
+
+ String ret;
+
+ // stdout
+ try (BufferedReader stdout = new BufferedReader(new InputStreamReader(
+ process.getInputStream(), StandardCharsets.UTF_8))) {
+ ret = stdout.readLine();
+ }
+
+ // stderr
+ try (BufferedReader stderr = new BufferedReader(new InputStreamReader(
+ process.getErrorStream(), StandardCharsets.UTF_8))) {
+ String line = stderr.readLine();
+ if ((line != null) && (line.length() > 0)) {
+ LOGGER.error("Error output from stty: " + line);
+ }
+ }
+
+ try {
+ process.waitFor();
+ } catch (InterruptedException ignored) {
+ }
+
+ int exitValue = process.exitValue();
+ if (exitValue != 0) {
+ LOGGER.error("stty returned error code: " + exitValue);
+ }
+ return ret;
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalPrinterImpl.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalPrinterImpl.java
new file mode 100644
index 00000000000..6ad7bdcd9e3
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/TerminalPrinterImpl.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal.impl;
+
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.terminal.Attributes;
+import org.apache.hadoop.hbase.hbtop.terminal.Color;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The implementation of the {@link TerminalPrinter} interface.
+ */
+@InterfaceAudience.Private
+public class TerminalPrinterImpl implements TerminalPrinter {
+ private final ScreenBuffer screenBuffer;
+ private int row;
+ private int column;
+
+ private final Attributes attributes = new Attributes();
+
+ TerminalPrinterImpl(ScreenBuffer screenBuffer, int startRow) {
+ this.screenBuffer = Objects.requireNonNull(screenBuffer);
+ this.row = startRow;
+ }
+
+ @Override
+ public TerminalPrinter print(String value) {
+ screenBuffer.putString(column, row, value, attributes);
+ column += value.length();
+ return this;
+ }
+
+ @Override
+ public TerminalPrinter startHighlight() {
+ attributes.setForegroundColor(Color.BLACK);
+ attributes.setBackgroundColor(Color.WHITE);
+ return this;
+ }
+
+ @Override
+ public TerminalPrinter stopHighlight() {
+ attributes.setForegroundColor(Color.WHITE);
+ attributes.setBackgroundColor(Color.BLACK);
+ return this;
+ }
+
+ @Override
+ public TerminalPrinter startBold() {
+ attributes.setBold(true);
+ return this;
+ }
+
+ @Override
+ public TerminalPrinter stopBold() {
+ attributes.setBold(false);
+ return this;
+ }
+
+ @Override
+ public void endOfLine() {
+ screenBuffer.endOfLine(column, row);
+ row += 1;
+ column = 0;
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordFilterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordFilterTest.java
new file mode 100644
index 00000000000..e7095698566
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordFilterTest.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import static org.hamcrest.CoreMatchers.hasItems;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.junit.Assert.assertThat;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.Size;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class RecordFilterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(RecordFilterTest.class);
+
+ @Test
+ public void testParseAndBuilder() {
+ testParseAndBuilder("REGION=region1", false,
+ RecordFilter.newBuilder(Field.REGION).equal("region1"));
+
+ testParseAndBuilder("REGION=", false,
+ RecordFilter.newBuilder(Field.REGION).equal(""));
+
+ testParseAndBuilder("!REGION=region1", false,
+ RecordFilter.newBuilder(Field.REGION).notEqual("region1"));
+
+ testParseAndBuilder("REGION==region2", true,
+ RecordFilter.newBuilder(Field.REGION, true).doubleEquals("region2"));
+
+ testParseAndBuilder("!REGION==region2", true,
+ RecordFilter.newBuilder(Field.REGION, true).notDoubleEquals("region2"));
+
+ testParseAndBuilder("#REQ/S>100", false,
+ RecordFilter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).greater(100L));
+
+ testParseAndBuilder("!#REQ/S>100", false,
+ RecordFilter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).notGreater(100L));
+
+ testParseAndBuilder("SF>=50MB", true,
+ RecordFilter.newBuilder(Field.STORE_FILE_SIZE, true).greaterOrEqual("50MB"));
+
+ testParseAndBuilder("!SF>=50MB", true,
+ RecordFilter.newBuilder(Field.STORE_FILE_SIZE, true).notGreaterOrEqual("50MB"));
+
+ testParseAndBuilder("#REQ/S<20", false,
+ RecordFilter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).less(20L));
+
+ testParseAndBuilder("!#REQ/S<20", false,
+ RecordFilter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).notLess(20L));
+
+ testParseAndBuilder("%COMP<=50%", true,
+ RecordFilter.newBuilder(Field.COMPACTION_PROGRESS, true).lessOrEqual("50%"));
+
+ testParseAndBuilder("!%COMP<=50%", true,
+ RecordFilter.newBuilder(Field.COMPACTION_PROGRESS, true).notLessOrEqual("50%"));
+ }
+
+ private void testParseAndBuilder(String filterString, boolean ignoreCase, RecordFilter expected) {
+ RecordFilter actual = RecordFilter.parse(filterString, ignoreCase);
+ assertThat(expected, is(actual));
+ }
+
+ @Test
+ public void testParseFailure() {
+ RecordFilter filter = RecordFilter.parse("REGIO=region1", false);
+ assertThat(filter, is(nullValue()));
+
+ filter = RecordFilter.parse("", false);
+ assertThat(filter, is(nullValue()));
+
+ filter = RecordFilter.parse("#REQ/S==aaa", false);
+ assertThat(filter, is(nullValue()));
+
+ filter = RecordFilter.parse("SF>=50", false);
+ assertThat(filter, is(nullValue()));
+ }
+
+ @Test
+ public void testToString() {
+ testToString("REGION=region1");
+ testToString("!REGION=region1");
+ testToString("REGION==region2");
+ testToString("!REGION==region2");
+ testToString("#REQ/S>100");
+ testToString("!#REQ/S>100");
+ testToString("SF>=50.0MB");
+ testToString("!SF>=50.0MB");
+ testToString("#REQ/S<20");
+ testToString("!#REQ/S<20");
+ testToString("%COMP<=50.00%");
+ testToString("!%COMP<=50.00%");
+ }
+
+ private void testToString(String filterString) {
+ RecordFilter filter = RecordFilter.parse(filterString, false);
+ assertThat(filter, is(notNullValue()));
+ assertThat(filterString, is(filter.toString()));
+ }
+
+ @Test
+ public void testFilters() {
+ List<Record> records = createTestRecords();
+
+ testFilter(records, "REGION=region", false,
+ "region1", "region2", "region3", "region4", "region5");
+ testFilter(records, "!REGION=region", false);
+ testFilter(records, "REGION=Region", false);
+
+ testFilter(records, "REGION==region", false);
+ testFilter(records, "REGION==region1", false, "region1");
+ testFilter(records, "!REGION==region1", false, "region2", "region3", "region4", "region5");
+
+ testFilter(records, "#REQ/S==100", false, "region1");
+ testFilter(records, "#REQ/S>100", false, "region2", "region5");
+ testFilter(records, "SF>=100MB", false, "region1", "region2", "region4", "region5");
+ testFilter(records, "!#SF>=10", false, "region1", "region4");
+ testFilter(records, "LOCALITY<0.5", false, "region5");
+ testFilter(records, "%COMP<=50%", false, "region2", "region3", "region4", "region5");
+
+ testFilters(records, Arrays.asList("SF>=100MB", "#REQ/S>100"), false,
+ "region2", "region5");
+ testFilters(records, Arrays.asList("%COMP<=50%", "!#SF>=10"), false, "region4");
+ testFilters(records, Arrays.asList("!REGION==region1", "LOCALITY<0.5", "#REQ/S>100"), false,
+ "region5");
+ }
+
+ @Test
+ public void testFiltersIgnoreCase() {
+ List<Record> records = createTestRecords();
+
+ testFilter(records, "REGION=Region", true,
+ "region1", "region2", "region3", "region4", "region5");
+ testFilter(records, "REGION=REGION", true,
+ "region1", "region2", "region3", "region4", "region5");
+ }
+
+ private List<Record> createTestRecords() {
+ List<Record> ret = new ArrayList<>();
+ ret.add(createTestRecord("region1", 100L, new Size(100, Size.Unit.MEGABYTE), 2, 1.0f, 80f));
+ ret.add(createTestRecord("region2", 120L, new Size(100, Size.Unit.GIGABYTE), 10, 0.5f, 20f));
+ ret.add(createTestRecord("region3", 50L, new Size(500, Size.Unit.KILOBYTE), 15, 0.8f, 50f));
+ ret.add(createTestRecord("region4", 90L, new Size(10, Size.Unit.TERABYTE), 5, 0.9f, 30f));
+ ret.add(createTestRecord("region5", 200L, new Size(1, Size.Unit.PETABYTE), 13, 0.1f, 40f));
+ return ret;
+ }
+
+ private Record createTestRecord(String region, long requestCountPerSecond,
+ Size storeFileSize, int numStoreFiles, float locality, float compactionProgress) {
+ Record.Builder builder = Record.builder();
+ builder.put(Field.REGION, region);
+ builder.put(Field.REQUEST_COUNT_PER_SECOND, requestCountPerSecond);
+ builder.put(Field.STORE_FILE_SIZE, storeFileSize);
+ builder.put(Field.NUM_STORE_FILES, numStoreFiles);
+ builder.put(Field.LOCALITY, locality);
+ builder.put(Field.COMPACTION_PROGRESS, compactionProgress);
+ return builder.build();
+ }
+
+ private void testFilter(List<Record> records, String filterString, boolean ignoreCase,
+ String... expectedRegions) {
+ testFilters(records, Collections.singletonList(filterString), ignoreCase, expectedRegions);
+ }
+
+ private void testFilters(List<Record> records, List<String> filterStrings, boolean ignoreCase,
+ String... expectedRegions) {
+ List<String> actual =
+ records.stream().filter(r -> filterStrings.stream()
+ .map(f -> RecordFilter.parse(f, ignoreCase))
+ .allMatch(f -> f.execute(r)))
+ .map(r -> r.get(Field.REGION).asString())
+ .collect(Collectors.toList());
+ assertThat(actual, hasItems(expectedRegions));
+ assertThat(actual.size(), is(expectedRegions.length));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java
new file mode 100644
index 00000000000..096bbfc6dca
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import static org.apache.hadoop.hbase.hbtop.Record.entry;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class RecordTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(RecordTest.class);
+
+ @Test
+ public void testBuilder() {
+ Record actual1 = Record.builder().put(Field.TABLE, "tableName")
+ .put(entry(Field.REGION_COUNT, 3))
+ .put(Field.REQUEST_COUNT_PER_SECOND, Field.REQUEST_COUNT_PER_SECOND.newValue(100L))
+ .build();
+
+ assertThat(actual1.size(), is(3));
+ assertThat(actual1.get(Field.TABLE).asString(), is("tableName"));
+ assertThat(actual1.get(Field.REGION_COUNT).asInt(), is(3));
+ assertThat(actual1.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), is(100L));
+
+ Record actual2 = Record.builder().putAll(actual1).build();
+
+ assertThat(actual2.size(), is(3));
+ assertThat(actual2.get(Field.TABLE).asString(), is("tableName"));
+ assertThat(actual2.get(Field.REGION_COUNT).asInt(), is(3));
+ assertThat(actual2.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), is(100L));
+ }
+
+ @Test
+ public void testOfEntries() {
+ Record actual = Record.ofEntries(
+ entry(Field.TABLE, "tableName"),
+ entry(Field.REGION_COUNT, 3),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 100L)
+ );
+
+ assertThat(actual.size(), is(3));
+ assertThat(actual.get(Field.TABLE).asString(), is("tableName"));
+ assertThat(actual.get(Field.REGION_COUNT).asInt(), is(3));
+ assertThat(actual.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), is(100L));
+ }
+
+ @Test
+ public void testCombine() {
+ Record record1 = Record.ofEntries(
+ entry(Field.TABLE, "tableName"),
+ entry(Field.REGION_COUNT, 3),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 100L)
+ );
+
+ Record record2 = Record.ofEntries(
+ entry(Field.TABLE, "tableName"),
+ entry(Field.REGION_COUNT, 5),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 500L)
+ );
+
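+ // combine() is expected to sum the numeric fields and leave the shared string field unchanged.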
+ Record actual = record1.combine(record2);
+
+ assertThat(actual.size(), is(3));
+ assertThat(actual.get(Field.TABLE).asString(), is("tableName"));
+ assertThat(actual.get(Field.REGION_COUNT).asInt(), is(8));
+ assertThat(actual.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), is(600L));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java
new file mode 100644
index 00000000000..43a84474027
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java
@@ -0,0 +1,402 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.commons.lang3.time.FastDateFormat;
+import org.apache.hadoop.hbase.ClusterMetrics;
+import org.apache.hadoop.hbase.ClusterMetricsBuilder;
+import org.apache.hadoop.hbase.RegionMetrics;
+import org.apache.hadoop.hbase.RegionMetricsBuilder;
+import org.apache.hadoop.hbase.ServerMetrics;
+import org.apache.hadoop.hbase.ServerMetricsBuilder;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.Size;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.screen.top.Summary;
+import org.apache.hadoop.hbase.master.RegionState;
+import org.apache.hadoop.hbase.util.Bytes;
+
+
+public final class TestUtils {
+
+ private TestUtils() {
+ }
+
+ public static ClusterMetrics createDummyClusterMetrics() {
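+ // Build a dummy cluster with two live region servers hosting three regions each,
+ // one dead server and one region in transition.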
+ Map<ServerName, ServerMetrics> serverMetricsMap = new HashMap<>();
+
+ // host1
+ List<RegionMetrics> regionMetricsList = new ArrayList<>();
+ regionMetricsList.add(createRegionMetrics(
+ "table1,,1.00000000000000000000000000000000.",
+ 100, 50, 100,
+ new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
+ new Size(100, Size.Unit.MEGABYTE), 0.1f, 100, 100, "2019-07-22 00:00:00"));
+ regionMetricsList.add(createRegionMetrics(
+ "table2,1,2.00000000000000000000000000000001.",
+ 200, 100, 200,
+ new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
+ new Size(200, Size.Unit.MEGABYTE), 0.2f, 50, 200, "2019-07-22 00:00:01"));
+ regionMetricsList.add(createRegionMetrics(
+ "namespace:table3,,3_0001.00000000000000000000000000000002.",
+ 300, 150, 300,
+ new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
+ new Size(300, Size.Unit.MEGABYTE), 0.3f, 100, 300, "2019-07-22 00:00:02"));
+
+ ServerName host1 = ServerName.valueOf("host1.apache.com", 1000, 1);
+ serverMetricsMap.put(host1, createServerMetrics(host1, 100,
+ new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 100,
+ regionMetricsList));
+
+ // host2
+ regionMetricsList.clear();
+ regionMetricsList.add(createRegionMetrics(
+ "table1,1,4.00000000000000000000000000000003.",
+ 100, 50, 100,
+ new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
+ new Size(100, Size.Unit.MEGABYTE), 0.4f, 50, 100, "2019-07-22 00:00:03"));
+ regionMetricsList.add(createRegionMetrics(
+ "table2,,5.00000000000000000000000000000004.",
+ 200, 100, 200,
+ new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
+ new Size(200, Size.Unit.MEGABYTE), 0.5f, 150, 200, "2019-07-22 00:00:04"));
+ regionMetricsList.add(createRegionMetrics(
+ "namespace:table3,,6.00000000000000000000000000000005.",
+ 300, 150, 300,
+ new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
+ new Size(300, Size.Unit.MEGABYTE), 0.6f, 200, 300, "2019-07-22 00:00:05"));
+
+ ServerName host2 = ServerName.valueOf("host2.apache.com", 1001, 2);
+ serverMetricsMap.put(host2, createServerMetrics(host2, 200,
+ new Size(16, Size.Unit.GIGABYTE), new Size(32, Size.Unit.GIGABYTE), 200,
+ regionMetricsList));
+
+ ServerName host3 = ServerName.valueOf("host3.apache.com", 1002, 3);
+ return ClusterMetricsBuilder.newBuilder()
+ .setHBaseVersion("3.0.0-SNAPSHOT")
+ .setClusterId("01234567-89ab-cdef-0123-456789abcdef")
+ .setLiveServerMetrics(serverMetricsMap)
+ .setDeadServerNames(Collections.singletonList(host3))
+ .setRegionsInTransition(Collections.singletonList(
+ new RegionState(RegionInfoBuilder.newBuilder(TableName.valueOf("table4"))
+ .setStartKey(new byte [0])
+ .setEndKey(new byte [0])
+ .setOffline(true)
+ .setReplicaId(0)
+ .setRegionId(0)
+ .setSplit(false)
+ .build(),
+ RegionState.State.OFFLINE, host3)))
+ .build();
+ }
+
+ private static RegionMetrics createRegionMetrics(String regionName, long readRequestCount,
+ long filteredReadRequestCount, long writeRequestCount, Size storeFileSize,
+ Size uncompressedStoreFileSize, int storeFileCount, Size memStoreSize, float locality,
+ long compactedCellCount, long compactingCellCount, String lastMajorCompactionTime) {
+
+ FastDateFormat df = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");
+ try {
+ return RegionMetricsBuilder.newBuilder(Bytes.toBytes(regionName))
+ .setReadRequestCount(readRequestCount)
+ .setFilteredReadRequestCount(filteredReadRequestCount)
+ .setWriteRequestCount(writeRequestCount).setStoreFileSize(storeFileSize)
+ .setUncompressedStoreFileSize(uncompressedStoreFileSize).setStoreFileCount(storeFileCount)
+ .setMemStoreSize(memStoreSize).setDataLocality(locality)
+ .setCompactedCellCount(compactedCellCount).setCompactingCellCount(compactingCellCount)
+ .setLastMajorCompactionTimestamp(df.parse(lastMajorCompactionTime).getTime()).build();
+ } catch (ParseException e) {
+ throw new IllegalArgumentException(e);
+ }
+ }
+
+ private static ServerMetrics createServerMetrics(ServerName serverName, long reportTimestamp,
+ Size usedHeapSize, Size maxHeapSize, long requestCountPerSecond,
+ List<RegionMetrics> regionMetricsList) {
+
+ return ServerMetricsBuilder.newBuilder(serverName)
+ .setReportTimestamp(reportTimestamp)
+ .setUsedHeapSize(usedHeapSize)
+ .setMaxHeapSize(maxHeapSize)
+ .setRequestCountPerSecond(requestCountPerSecond)
+ .setRegionMetrics(regionMetricsList).build();
+ }
+
+ public static void assertRecordsInRegionMode(List<Record> records) {
+ assertThat(records.size(), is(6));
+
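+ // The expected COMPACTION_PROGRESS below is compactedCellCount / compactingCellCount * 100,
+ // e.g. 50 compacted of 100 compacting cells -> 50%.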
+ for (Record record : records) {
+ switch (record.get(Field.REGION_NAME).asString()) {
+ case "table1,,1.00000000000000000000000000000000.":
+ assertRecordInRegionMode(record, "default", "1", "", "table1",
+ "00000000000000000000000000000000", "host1:1000", "host1.apache.com,1000,1",0L,
+ 0L, 0L, 0L, new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
+ new Size(100, Size.Unit.MEGABYTE), 0.1f, "", 100L, 100L, 100f,
+ "2019-07-22 00:00:00");
+ break;
+
+ case "table1,1,4.00000000000000000000000000000003.":
+ assertRecordInRegionMode(record, "default", "4", "", "table1",
+ "00000000000000000000000000000003", "host2:1001", "host2.apache.com,1001,2",0L,
+ 0L, 0L, 0L, new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
+ new Size(100, Size.Unit.MEGABYTE), 0.4f, "1", 100L, 50L, 50f,
+ "2019-07-22 00:00:03");
+ break;
+
+ case "table2,,5.00000000000000000000000000000004.":
+ assertRecordInRegionMode(record, "default", "5", "", "table2",
+ "00000000000000000000000000000004", "host2:1001", "host2.apache.com,1001,2",0L,
+ 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
+ new Size(200, Size.Unit.MEGABYTE), 0.5f, "", 200L, 150L, 75f,
+ "2019-07-22 00:00:04");
+ break;
+
+ case "table2,1,2.00000000000000000000000000000001.":
+ assertRecordInRegionMode(record, "default", "2", "", "table2",
+ "00000000000000000000000000000001", "host1:1000", "host1.apache.com,1000,1",0L,
+ 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
+ new Size(200, Size.Unit.MEGABYTE), 0.2f, "1", 200L, 50L, 25f,
+ "2019-07-22 00:00:01");
+ break;
+
+ case "namespace:table3,,6.00000000000000000000000000000005.":
+ assertRecordInRegionMode(record, "namespace", "6", "", "table3",
+ "00000000000000000000000000000005", "host2:1001", "host2.apache.com,1001,2",0L,
+ 0L, 0L, 0L, new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
+ new Size(300, Size.Unit.MEGABYTE), 0.6f, "", 300L, 200L, 66.66667f,
+ "2019-07-22 00:00:05");
+ break;
+
+ case "namespace:table3,,3_0001.00000000000000000000000000000002.":
+ assertRecordInRegionMode(record, "namespace", "3", "1", "table3",
+ "00000000000000000000000000000002", "host1:1000", "host1.apache.com,1000,1",0L,
+ 0L, 0L, 0L, new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
+ new Size(300, Size.Unit.MEGABYTE), 0.3f, "", 300L, 100L, 33.333336f,
+ "2019-07-22 00:00:02");
+ break;
+
+ default:
+ fail();
+ }
+ }
+ }
+
+ private static void assertRecordInRegionMode(Record record, String namespace, String startCode,
+ String replicaId, String table, String region, String regionServer, String longRegionServer,
+ long requestCountPerSecond, long readRequestCountPerSecond,
+ long filteredReadRequestCountPerSecond, long writeCountRequestPerSecond,
+ Size storeFileSize, Size uncompressedStoreFileSize, int numStoreFiles,
+ Size memStoreSize, float locality, String startKey, long compactingCellCount,
+ long compactedCellCount, float compactionProgress, String lastMajorCompactionTime) {
+ assertThat(record.size(), is(22));
+ assertThat(record.get(Field.NAMESPACE).asString(), is(namespace));
+ assertThat(record.get(Field.START_CODE).asString(), is(startCode));
+ assertThat(record.get(Field.REPLICA_ID).asString(), is(replicaId));
+ assertThat(record.get(Field.TABLE).asString(), is(table));
+ assertThat(record.get(Field.REGION).asString(), is(region));
+ assertThat(record.get(Field.REGION_SERVER).asString(), is(regionServer));
+ assertThat(record.get(Field.LONG_REGION_SERVER).asString(), is(longRegionServer));
+ assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(),
+ is(requestCountPerSecond));
+ assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(readRequestCountPerSecond));
+ assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(filteredReadRequestCountPerSecond));
+ assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(writeCountRequestPerSecond));
+ assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize));
+ assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(),
+ is(uncompressedStoreFileSize));
+ assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles));
+ assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize));
+ assertThat(record.get(Field.LOCALITY).asFloat(), is(locality));
+ assertThat(record.get(Field.START_KEY).asString(), is(startKey));
+ assertThat(record.get(Field.COMPACTING_CELL_COUNT).asLong(), is(compactingCellCount));
+ assertThat(record.get(Field.COMPACTED_CELL_COUNT).asLong(), is(compactedCellCount));
+ assertThat(record.get(Field.COMPACTION_PROGRESS).asFloat(), is(compactionProgress));
+ assertThat(record.get(Field.LAST_MAJOR_COMPACTION_TIME).asString(),
+ is(lastMajorCompactionTime));
+ }
+
+ public static void assertRecordsInNamespaceMode(List<Record> records) {
+ assertThat(records.size(), is(2));
+
+ for (Record record : records) {
+ switch (record.get(Field.NAMESPACE).asString()) {
+ case "default":
+ assertRecordInNamespaceMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE),
+ new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 4);
+ break;
+
+ case "namespace":
+ assertRecordInNamespaceMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE),
+ new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 2);
+ break;
+
+ default:
+ fail();
+ }
+ }
+ }
+
+ private static void assertRecordInNamespaceMode(Record record, long requestCountPerSecond,
+ long readRequestCountPerSecond, long filteredReadRequestCountPerSecond,
+ long writeCountRequestPerSecond, Size storeFileSize, Size uncompressedStoreFileSize,
+ int numStoreFiles, Size memStoreSize, int regionCount) {
+ assertThat(record.size(), is(10));
+ assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(),
+ is(requestCountPerSecond));
+ assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(readRequestCountPerSecond));
+ assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(filteredReadRequestCountPerSecond));
+ assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(writeCountRequestPerSecond));
+ assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize));
+ assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(),
+ is(uncompressedStoreFileSize));
+ assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles));
+ assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize));
+ assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount));
+ }
+
+ public static void assertRecordsInTableMode(List<Record> records) {
+ assertThat(records.size(), is(3));
+
+ for (Record record : records) {
+ String tableName = String.format("%s:%s", record.get(Field.NAMESPACE).asString(),
+ record.get(Field.TABLE).asString());
+
+ switch (tableName) {
+ case "default:table1":
+ assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE),
+ new Size(400, Size.Unit.MEGABYTE), 2, new Size(200, Size.Unit.MEGABYTE), 2);
+ break;
+
+ case "default:table2":
+ assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(400, Size.Unit.MEGABYTE),
+ new Size(800, Size.Unit.MEGABYTE), 4, new Size(400, Size.Unit.MEGABYTE), 2);
+ break;
+
+ case "namespace:table3":
+ assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE),
+ new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 2);
+ break;
+
+ default:
+ fail();
+ }
+ }
+ }
+
+ private static void assertRecordInTableMode(Record record, long requestCountPerSecond,
+ long readRequestCountPerSecond, long filteredReadRequestCountPerSecond,
+ long writeCountRequestPerSecond, Size storeFileSize, Size uncompressedStoreFileSize,
+ int numStoreFiles, Size memStoreSize, int regionCount) {
+ assertThat(record.size(), is(11));
+ assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(),
+ is(requestCountPerSecond));
+ assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(readRequestCountPerSecond));
+ assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(filteredReadRequestCountPerSecond));
+ assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(writeCountRequestPerSecond));
+ assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize));
+ assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(),
+ is(uncompressedStoreFileSize));
+ assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles));
+ assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize));
+ assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount));
+ }
+
+ public static void assertRecordsInRegionServerMode(List<Record> records) {
+ assertThat(records.size(), is(2));
+
+ for (Record record : records) {
+ switch (record.get(Field.REGION_SERVER).asString()) {
+ case "host1:1000":
+ assertRecordInRegionServerMode(record, "host1.apache.com,1000,1", 0L, 0L, 0L, 0L,
+ new Size(600, Size.Unit.MEGABYTE), new Size(1200, Size.Unit.MEGABYTE), 6,
+ new Size(600, Size.Unit.MEGABYTE), 3, new Size(100, Size.Unit.MEGABYTE),
+ new Size(200, Size.Unit.MEGABYTE));
+ break;
+
+ case "host2:1001":
+ assertRecordInRegionServerMode(record, "host2.apache.com,1001,2", 0L, 0L, 0L, 0L,
+ new Size(600, Size.Unit.MEGABYTE), new Size(1200, Size.Unit.MEGABYTE), 6,
+ new Size(600, Size.Unit.MEGABYTE), 3, new Size(16, Size.Unit.GIGABYTE),
+ new Size(32, Size.Unit.GIGABYTE));
+ break;
+
+ default:
+ fail();
+ }
+ }
+ }
+
+ private static void assertRecordInRegionServerMode(Record record, String longRegionServer,
+ long requestCountPerSecond, long readRequestCountPerSecond,
+ long filteredReadRequestCountPerSecond, long writeCountRequestPerSecond,
+ Size storeFileSize, Size uncompressedStoreFileSize, int numStoreFiles,
+ Size memStoreSize, int regionCount, Size usedHeapSize, Size maxHeapSize) {
+ assertThat(record.size(), is(13));
+ assertThat(record.get(Field.LONG_REGION_SERVER).asString(),
+ is(longRegionServer));
+ assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(),
+ is(requestCountPerSecond));
+ assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(readRequestCountPerSecond));
+ assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(filteredReadRequestCountPerSecond));
+ assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(),
+ is(writeCountRequestPerSecond));
+ assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize));
+ assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(),
+ is(uncompressedStoreFileSize));
+ assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles));
+ assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize));
+ assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount));
+ assertThat(record.get(Field.USED_HEAP_SIZE).asSize(), is(usedHeapSize));
+ assertThat(record.get(Field.MAX_HEAP_SIZE).asSize(), is(maxHeapSize));
+ }
+
+ public static void assertSummary(Summary summary) {
+ assertThat(summary.getVersion(), is("3.0.0-SNAPSHOT"));
+ assertThat(summary.getClusterId(), is("01234567-89ab-cdef-0123-456789abcdef"));
+ assertThat(summary.getServers(), is(3));
+ assertThat(summary.getLiveServers(), is(2));
+ assertThat(summary.getDeadServers(), is(1));
+ assertThat(summary.getRegionCount(), is(6));
+ assertThat(summary.getRitCount(), is(1));
+ assertThat(summary.getAverageLoad(), is(3.0));
+ assertThat(summary.getAggregateRequestPerSecond(), is(300L));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java
new file mode 100644
index 00000000000..3cb11076141
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java
@@ -0,0 +1,298 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.field;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.Size;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class FieldValueTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(FieldValueTest.class);
+
+ @Test
+ public void testParseAndAsSomethingMethod() {
+ // String
+ FieldValue stringFieldValue = new FieldValue("aaa", FieldValueType.STRING);
+ assertThat(stringFieldValue.asString(), is("aaa"));
+
+ try {
+ new FieldValue(1, FieldValueType.STRING);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ // Integer
+ FieldValue integerFieldValue = new FieldValue(100, FieldValueType.INTEGER);
+ assertThat(integerFieldValue.asInt(), is(100));
+
+ integerFieldValue = new FieldValue("100", FieldValueType.INTEGER);
+ assertThat(integerFieldValue.asInt(), is(100));
+
+ try {
+ new FieldValue("aaa", FieldValueType.INTEGER);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ // Long
+ FieldValue longFieldValue = new FieldValue(100L, FieldValueType.LONG);
+ assertThat(longFieldValue.asLong(), is(100L));
+
+ longFieldValue = new FieldValue("100", FieldValueType.LONG);
+ assertThat(longFieldValue.asLong(), is(100L));
+
+ try {
+ new FieldValue("aaa", FieldValueType.LONG);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ try {
+ new FieldValue(100, FieldValueType.LONG);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ // Float
+ FieldValue floatFieldValue = new FieldValue(1.0f, FieldValueType.FLOAT);
+ assertThat(floatFieldValue.asFloat(), is(1.0f));
+
+ floatFieldValue = new FieldValue("1", FieldValueType.FLOAT);
+ assertThat(floatFieldValue.asFloat(), is(1.0f));
+
+ try {
+ new FieldValue("aaa", FieldValueType.FLOAT);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ try {
+ new FieldValue(1, FieldValueType.FLOAT);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ // Size
+ FieldValue sizeFieldValue =
+ new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("100.0MB"));
+ assertThat(sizeFieldValue.asSize(), is(new Size(100, Size.Unit.MEGABYTE)));
+
+ sizeFieldValue = new FieldValue("100MB", FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("100.0MB"));
+ assertThat(sizeFieldValue.asSize(), is(new Size(100, Size.Unit.MEGABYTE)));
+
+ try {
+ new FieldValue("100", FieldValueType.SIZE);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ try {
+ new FieldValue(100, FieldValueType.SIZE);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+
+ // Percent
+ FieldValue percentFieldValue =
+ new FieldValue(100f, FieldValueType.PERCENT);
+ assertThat(percentFieldValue.asString(), is("100.00%"));
+ assertThat(percentFieldValue.asFloat(), is(100f));
+
+ percentFieldValue = new FieldValue("100%", FieldValueType.PERCENT);
+ assertThat(percentFieldValue.asString(), is("100.00%"));
+ assertThat(percentFieldValue.asFloat(), is(100f));
+
+ percentFieldValue = new FieldValue("100", FieldValueType.PERCENT);
+ assertThat(percentFieldValue.asString(), is("100.00%"));
+ assertThat(percentFieldValue.asFloat(), is(100f));
+
+ try {
+ new FieldValue(100, FieldValueType.PERCENT);
+ fail();
+ } catch (IllegalArgumentException ignored) {
+ }
+ }
+
+ @Test
+ public void testCompareTo() {
+ // String
+ FieldValue stringAFieldValue = new FieldValue("a", FieldValueType.STRING);
+ FieldValue stringAFieldValue2 = new FieldValue("a", FieldValueType.STRING);
+ FieldValue stringBFieldValue = new FieldValue("b", FieldValueType.STRING);
+ FieldValue stringCapitalAFieldValue = new FieldValue("A", FieldValueType.STRING);
+
+ assertThat(stringAFieldValue.compareTo(stringAFieldValue2), is(0));
+ assertThat(stringBFieldValue.compareTo(stringAFieldValue), is(1));
+ assertThat(stringAFieldValue.compareTo(stringBFieldValue), is(-1));
+ assertThat(stringAFieldValue.compareTo(stringCapitalAFieldValue), is(32));
+
+ // Integer
+ FieldValue integer1FieldValue = new FieldValue(1, FieldValueType.INTEGER);
+ FieldValue integer1FieldValue2 = new FieldValue(1, FieldValueType.INTEGER);
+ FieldValue integer2FieldValue = new FieldValue(2, FieldValueType.INTEGER);
+
+ assertThat(integer1FieldValue.compareTo(integer1FieldValue2), is(0));
+ assertThat(integer2FieldValue.compareTo(integer1FieldValue), is(1));
+ assertThat(integer1FieldValue.compareTo(integer2FieldValue), is(-1));
+
+ // Long
+ FieldValue long1FieldValue = new FieldValue(1L, FieldValueType.LONG);
+ FieldValue long1FieldValue2 = new FieldValue(1L, FieldValueType.LONG);
+ FieldValue long2FieldValue = new FieldValue(2L, FieldValueType.LONG);
+
+ assertThat(long1FieldValue.compareTo(long1FieldValue2), is(0));
+ assertThat(long2FieldValue.compareTo(long1FieldValue), is(1));
+ assertThat(long1FieldValue.compareTo(long2FieldValue), is(-1));
+
+ // Float
+ FieldValue float1FieldValue = new FieldValue(1.0f, FieldValueType.FLOAT);
+ FieldValue float1FieldValue2 = new FieldValue(1.0f, FieldValueType.FLOAT);
+ FieldValue float2FieldValue = new FieldValue(2.0f, FieldValueType.FLOAT);
+
+ assertThat(float1FieldValue.compareTo(float1FieldValue2), is(0));
+ assertThat(float2FieldValue.compareTo(float1FieldValue), is(1));
+ assertThat(float1FieldValue.compareTo(float2FieldValue), is(-1));
+
+ // Size
+ FieldValue size100MBFieldValue =
+ new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ FieldValue size100MBFieldValue2 =
+ new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ FieldValue size200MBFieldValue =
+ new FieldValue(new Size(200, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+
+ assertThat(size100MBFieldValue.compareTo(size100MBFieldValue2), is(0));
+ assertThat(size200MBFieldValue.compareTo(size100MBFieldValue), is(1));
+ assertThat(size100MBFieldValue.compareTo(size200MBFieldValue), is(-1));
+
+ // Percent
+ FieldValue percent50FieldValue = new FieldValue(50.0f, FieldValueType.PERCENT);
+ FieldValue percent50FieldValue2 = new FieldValue(50.0f, FieldValueType.PERCENT);
+ FieldValue percent100FieldValue = new FieldValue(100.0f, FieldValueType.PERCENT);
+
+ assertThat(percent50FieldValue.compareTo(percent50FieldValue2), is(0));
+ assertThat(percent100FieldValue.compareTo(percent50FieldValue), is(1));
+ assertThat(percent50FieldValue.compareTo(percent100FieldValue), is(-1));
+ }
+
+ @Test
+ public void testPlus() {
+ // String
+ FieldValue stringFieldValue = new FieldValue("a", FieldValueType.STRING);
+ FieldValue stringFieldValue2 = new FieldValue("b", FieldValueType.STRING);
+ assertThat(stringFieldValue.plus(stringFieldValue2).asString(), is("ab"));
+
+ // Integer
+ FieldValue integerFieldValue = new FieldValue(1, FieldValueType.INTEGER);
+ FieldValue integerFieldValue2 = new FieldValue(2, FieldValueType.INTEGER);
+ assertThat(integerFieldValue.plus(integerFieldValue2).asInt(), is(3));
+
+ // Long
+ FieldValue longFieldValue = new FieldValue(1L, FieldValueType.LONG);
+ FieldValue longFieldValue2 = new FieldValue(2L, FieldValueType.LONG);
+ assertThat(longFieldValue.plus(longFieldValue2).asLong(), is(3L));
+
+ // Float
+ FieldValue floatFieldValue = new FieldValue(1.2f, FieldValueType.FLOAT);
+ FieldValue floatFieldValue2 = new FieldValue(2.2f, FieldValueType.FLOAT);
+ assertThat(floatFieldValue.plus(floatFieldValue2).asFloat(), is(3.4f));
+
+ // Size
+ FieldValue sizeFieldValue =
+ new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ FieldValue sizeFieldValue2 =
+ new FieldValue(new Size(200, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.plus(sizeFieldValue2).asString(), is("300.0MB"));
+ assertThat(sizeFieldValue.plus(sizeFieldValue2).asSize(),
+ is(new Size(300, Size.Unit.MEGABYTE)));
+
+ // Percent
+ FieldValue percentFieldValue = new FieldValue(30f, FieldValueType.PERCENT);
+ FieldValue percentFieldValue2 = new FieldValue(60f, FieldValueType.PERCENT);
+ assertThat(percentFieldValue.plus(percentFieldValue2).asString(), is("90.00%"));
+ assertThat(percentFieldValue.plus(percentFieldValue2).asFloat(), is(90f));
+ }
+
+ @Test
+ public void testCompareToIgnoreCase() {
+ FieldValue stringAFieldValue = new FieldValue("a", FieldValueType.STRING);
+ FieldValue stringCapitalAFieldValue = new FieldValue("A", FieldValueType.STRING);
+ FieldValue stringCapitalBFieldValue = new FieldValue("B", FieldValueType.STRING);
+
+ assertThat(stringAFieldValue.compareToIgnoreCase(stringCapitalAFieldValue), is(0));
+ assertThat(stringCapitalBFieldValue.compareToIgnoreCase(stringAFieldValue), is(1));
+ assertThat(stringAFieldValue.compareToIgnoreCase(stringCapitalBFieldValue), is(-1));
+ }
+
+ @Test
+ public void testOptimizeSize() {
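+ // Sizes are expected to be rendered in the largest unit that keeps the value at or above 1.0,
+ // capped at petabytes.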
+ FieldValue sizeFieldValue =
+ new FieldValue(new Size(1, Size.Unit.BYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1.0B"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(1024, Size.Unit.BYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1.0KB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(2 * 1024, Size.Unit.BYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("2.0KB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(2 * 1024, Size.Unit.KILOBYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("2.0MB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(1024 * 1024, Size.Unit.KILOBYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1.0GB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(2 * 1024 * 1024, Size.Unit.MEGABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("2.0TB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(2 * 1024, Size.Unit.TERABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("2.0PB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(1024 * 1024, Size.Unit.TERABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1024.0PB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(1, Size.Unit.PETABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1.0PB"));
+
+ sizeFieldValue =
+ new FieldValue(new Size(1024, Size.Unit.PETABYTE), FieldValueType.SIZE);
+ assertThat(sizeFieldValue.asString(), is("1024.0PB"));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java
new file mode 100644
index 00000000000..7ad1a3a870a
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import java.util.List;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.junit.Test;
+
+
+public abstract class ModeTestBase {
+
+ @Test
+ public void testGetRecords() {
+ List<Record> records = getMode().getRecords(TestUtils.createDummyClusterMetrics());
+ assertRecords(records);
+ }
+
+ protected abstract Mode getMode();
+ protected abstract void assertRecords(List<Record> records);
+
+ @Test
+ public void testDrillDown() {
+ List<Record> records = getMode().getRecords(TestUtils.createDummyClusterMetrics());
+ for (Record record : records) {
+ assertDrillDown(record, getMode().drillDown(record));
+ }
+ }
+
+ protected abstract void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo);
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java
new file mode 100644
index 00000000000..ace29b3f234
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.util.List;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class NamespaceModeTest extends ModeTestBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(NamespaceModeTest.class);
+
+ @Override
+ protected Mode getMode() {
+ return Mode.NAMESPACE;
+ }
+
+ @Override
+ protected void assertRecords(List<Record> records) {
+ TestUtils.assertRecordsInNamespaceMode(records);
+ }
+
+ @Override
+ protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) {
+ assertThat(drillDownInfo.getNextMode(), is(Mode.TABLE));
+ assertThat(drillDownInfo.getInitialFilters().size(), is(1));
+
+ switch (currentRecord.get(Field.NAMESPACE).asString()) {
+ case "default":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default"));
+ break;
+
+ case "namespace":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(),
+ is("NAMESPACE==namespace"));
+ break;
+
+ default:
+ fail();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java
new file mode 100644
index 00000000000..36ad3473eb9
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.junit.Assert.assertThat;
+
+import java.util.List;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class RegionModeTest extends ModeTestBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(RegionModeTest.class);
+
+ @Override
+ protected Mode getMode() {
+ return Mode.REGION;
+ }
+
+ @Override
+ protected void assertRecords(List<Record> records) {
+ TestUtils.assertRecordsInRegionMode(records);
+ }
+
+ @Override
+ protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) {
+ assertThat(drillDownInfo, is(nullValue()));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java
new file mode 100644
index 00000000000..93fa5c463b0
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.util.List;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class RegionServerModeTest extends ModeTestBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(RegionServerModeTest.class);
+
+ @Override
+ protected Mode getMode() {
+ return Mode.REGION_SERVER;
+ }
+
+ @Override
+ protected void assertRecords(List<Record> records) {
+ TestUtils.assertRecordsInRegionServerMode(records);
+ }
+
+ @Override
+ protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) {
+ assertThat(drillDownInfo.getNextMode(), is(Mode.REGION));
+ assertThat(drillDownInfo.getInitialFilters().size(), is(1));
+
+ switch (currentRecord.get(Field.REGION_SERVER).asString()) {
+ case "host1:1000":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("RS==host1:1000"));
+ break;
+
+ case "host2:1001":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("RS==host2:1001"));
+ break;
+
+ default:
+ fail();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java
new file mode 100644
index 00000000000..716ce260e9f
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class RequestCountPerSecondTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(RequestCountPerSecondTest.class);
+
+ @Test
+ public void test() {
+ RequestCountPerSecond requestCountPerSecond = new RequestCountPerSecond();
+
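+ // The first refresh only establishes a baseline, so every per-second rate is still 0.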
+ requestCountPerSecond.refresh(1000, 300, 100, 200);
+ assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(0L));
+ assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(0L));
+ assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(0L));
+ assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(0L));
+
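+ // 1000 ms later, reads, filtered reads and writes have each grown by 1000, giving 1000/s
+ // for each and 2000/s in total (reads + writes).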
+ requestCountPerSecond.refresh(2000, 1300, 1100, 1200);
+ assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(2000L));
+ assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(1000L));
+ assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(1000L));
+ assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(1000L));
+
+ requestCountPerSecond.refresh(12000, 5300, 3100, 2200);
+ assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(500L));
+ assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(400L));
+ assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(200L));
+ assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(100L));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java
new file mode 100644
index 00000000000..11265715c01
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.util.List;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class TableModeTest extends ModeTestBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TableModeTest.class);
+
+ @Override
+ protected Mode getMode() {
+ return Mode.TABLE;
+ }
+
+ @Override
+ protected void assertRecords(List<Record> records) {
+ TestUtils.assertRecordsInTableMode(records);
+ }
+
+ @Override
+ protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) {
+ assertThat(drillDownInfo.getNextMode(), is(Mode.REGION));
+ assertThat(drillDownInfo.getInitialFilters().size(), is(2));
+
+ String tableName = String.format("%s:%s", currentRecord.get(Field.NAMESPACE).asString(),
+ currentRecord.get(Field.TABLE).asString());
+
+ switch (tableName) {
+ case "default:table1":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default"));
+ assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table1"));
+ break;
+
+ case "default:table2":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default"));
+ assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table2"));
+ break;
+
+ case "namespace:table3":
+ assertThat(drillDownInfo.getInitialFilters().get(0).toString(),
+ is("NAMESPACE==namespace"));
+ assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table3"));
+ break;
+
+ default:
+ fail();
+ }
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java
new file mode 100644
index 00000000000..944e54841f3
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.field;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.verify;
+
+import java.util.EnumMap;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class FieldScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(FieldScreenPresenterTest.class);
+
+ @Mock
+ private FieldScreenView fieldScreenView;
+
+ private int sortFieldPosition = -1;
+ private List<Field> fields;
+ private EnumMap<Field, Boolean> fieldDisplayMap;
+
+ @Mock
+ private FieldScreenPresenter.ResultListener resultListener;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ private FieldScreenPresenter fieldScreenPresenter;
+
+ @Before
+ public void setup() {
+ Field sortField = Mode.REGION.getDefaultSortField();
+ fields = Mode.REGION.getFieldInfos().stream()
+ .map(FieldInfo::getField)
+ .collect(Collectors.toList());
+
+ fieldDisplayMap = Mode.REGION.getFieldInfos().stream()
+ .collect(() -> new EnumMap<>(Field.class),
+ (r, fi) -> r.put(fi.getField(), fi.isDisplayByDefault()), (r1, r2) -> {});
+
+ fieldScreenPresenter =
+ new FieldScreenPresenter(fieldScreenView, sortField, fields, fieldDisplayMap, resultListener,
+ topScreenView);
+
+ for (int i = 0; i < fields.size(); i++) {
+ Field field = fields.get(i);
+ if (field == sortField) {
+ sortFieldPosition = i;
+ break;
+ }
+ }
+ }
+
+ @Test
+ public void testInit() {
+ fieldScreenPresenter.init();
+
+ int modeHeaderMaxLength = "#COMPingCell".length();
+ int modeDescriptionMaxLength = "Filtered Read Request Count per second".length();
+
+ verify(fieldScreenView).showFieldScreen(eq("#REQ/S"), eq(fields), eq(fieldDisplayMap),
+ eq(sortFieldPosition), eq(modeHeaderMaxLength), eq(modeDescriptionMaxLength), eq(false));
+ }
+
+ @Test
+ public void testChangeSortField() {
+ fieldScreenPresenter.arrowUp();
+ fieldScreenPresenter.setSortField();
+
+ fieldScreenPresenter.arrowDown();
+ fieldScreenPresenter.arrowDown();
+ fieldScreenPresenter.setSortField();
+
+ fieldScreenPresenter.pageUp();
+ fieldScreenPresenter.setSortField();
+
+ fieldScreenPresenter.pageDown();
+ fieldScreenPresenter.setSortField();
+
+ InOrder inOrder = inOrder(fieldScreenView);
+ inOrder.verify(fieldScreenView).showScreenDescription(eq("LRS"));
+ inOrder.verify(fieldScreenView).showScreenDescription(eq("#READ/S"));
+ inOrder.verify(fieldScreenView).showScreenDescription(eq(fields.get(0).getHeader()));
+ inOrder.verify(fieldScreenView).showScreenDescription(
+ eq(fields.get(fields.size() - 1).getHeader()));
+ }
+
+ @Test
+ public void testSwitchFieldDisplay() {
+ fieldScreenPresenter.switchFieldDisplay();
+ fieldScreenPresenter.switchFieldDisplay();
+
+ InOrder inOrder = inOrder(fieldScreenView);
+ inOrder.verify(fieldScreenView).showField(anyInt(), any(), eq(false), anyBoolean(), anyInt(),
+ anyInt(), anyBoolean());
+ inOrder.verify(fieldScreenView).showField(anyInt(), any(), eq(true), anyBoolean(), anyInt(),
+ anyInt(), anyBoolean());
+ }
+
+ @Test
+ public void testChangeFieldsOrder() {
+ fieldScreenPresenter.turnOnMoveMode();
+ fieldScreenPresenter.arrowUp();
+ fieldScreenPresenter.turnOffMoveMode();
+
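+ // Mirror the move locally: after one arrowUp in move mode, the sort field is
+ // expected to sit one position earlier in the list passed to the result listener.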
+ Field removed = fields.remove(sortFieldPosition);
+ fields.add(sortFieldPosition - 1, removed);
+
+ assertThat(fieldScreenPresenter.transitionToNextScreen(), is(topScreenView));
+ verify(resultListener).accept(any(), eq(fields), any());
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java
new file mode 100644
index 00000000000..7c920edf086
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.help;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.argThat;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.verify;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class HelpScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(HelpScreenPresenterTest.class);
+
+ private static final long TEST_REFRESH_DELAY = 5;
+
+ @Mock
+ private HelpScreenView helpScreenView;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ private HelpScreenPresenter helpScreenPresenter;
+
+ @Before
+ public void setup() {
+ helpScreenPresenter = new HelpScreenPresenter(helpScreenView, TEST_REFRESH_DELAY,
+ topScreenView);
+ }
+
+ @Test
+ public void testInit() {
+ helpScreenPresenter.init();
+ verify(helpScreenView).showHelpScreen(eq(TEST_REFRESH_DELAY), argThat(cds -> cds.length == 14));
+ }
+
+ @Test
+ public void testTransitionToTopScreen() {
+ assertThat(helpScreenPresenter.transitionToNextScreen(), is(topScreenView));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java
new file mode 100644
index 00000000000..f1343a02d58
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.mode;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+
+import java.util.Arrays;
+import java.util.function.Consumer;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class ModeScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(ModeScreenPresenterTest.class);
+
+ @Mock
+ private ModeScreenView modeScreenView;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ @Mock
+ private Consumer<Mode> resultListener;
+
+ private ModeScreenPresenter createModeScreenPresenter(Mode currentMode) {
+ return new ModeScreenPresenter(modeScreenView, currentMode, resultListener, topScreenView);
+ }
+
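+ // The arrow-key tests below rely on the declaration order of Mode:
+ // NAMESPACE, TABLE, REGION, REGION_SERVER.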
+ @Test
+ public void testInit() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.init();
+
+ int modeHeaderMaxLength = Mode.REGION_SERVER.getHeader().length();
+ int modeDescriptionMaxLength = Mode.REGION_SERVER.getDescription().length();
+
+ verify(modeScreenView).showModeScreen(eq(Mode.REGION), eq(Arrays.asList(Mode.values())),
+ eq(Mode.REGION.ordinal()) , eq(modeHeaderMaxLength), eq(modeDescriptionMaxLength));
+ }
+
+ @Test
+ public void testSelectNamespaceMode() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.arrowUp();
+ modeScreenPresenter.arrowUp();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ verify(resultListener).accept(eq(Mode.NAMESPACE));
+ }
+
+ @Test
+ public void testSelectTableMode() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.arrowUp();
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ verify(resultListener).accept(eq(Mode.TABLE));
+ }
+
+ @Test
+ public void testSelectRegionMode() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.NAMESPACE);
+
+ modeScreenPresenter.arrowDown();
+ modeScreenPresenter.arrowDown();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ verify(resultListener).accept(eq(Mode.REGION));
+ }
+
+ @Test
+ public void testSelectRegionServerMode() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.arrowDown();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ verify(resultListener).accept(eq(Mode.REGION_SERVER));
+ }
+
+ @Test
+ public void testCancelSelectingMode() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.arrowDown();
+ modeScreenPresenter.arrowDown();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(false), is(topScreenView));
+ verify(resultListener, never()).accept(any());
+ }
+
+ @Test
+ public void testPageUp() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.pageUp();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ verify(resultListener).accept(eq(Mode.values()[0]));
+ }
+
+ @Test
+ public void testPageDown() {
+ ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION);
+
+ modeScreenPresenter.pageDown();
+
+ assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView));
+ Mode[] modes = Mode.values();
+ verify(resultListener).accept(eq(modes[modes.length - 1]));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java
new file mode 100644
index 00000000000..f3c4a24b0ef
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.argThat;
+import static org.mockito.Mockito.verify;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class FilterDisplayModeScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(FilterDisplayModeScreenPresenterTest.class);
+
+ @Mock
+ private FilterDisplayModeScreenView filterDisplayModeScreenView;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ private FilterDisplayModeScreenPresenter filterDisplayModeScreenPresenter;
+
+ @Before
+ public void setup() {
+ List<Field> fields = Mode.REGION.getFieldInfos().stream()
+ .map(FieldInfo::getField)
+ .collect(Collectors.toList());
+
+ List<RecordFilter> filters = new ArrayList<>();
+ filters.add(RecordFilter.parse("NAMESPACE==namespace", fields, true));
+ filters.add(RecordFilter.parse("TABLE==table", fields, true));
+
+ filterDisplayModeScreenPresenter = new FilterDisplayModeScreenPresenter(
+ filterDisplayModeScreenView, filters, topScreenView);
+ }
+
+ @Test
+ public void testInit() {
+ filterDisplayModeScreenPresenter.init();
+ verify(filterDisplayModeScreenView).showFilters(argThat(filters -> filters.size() == 2
+ && filters.get(0).toString().equals("NAMESPACE==namespace")
+ && filters.get(1).toString().equals("TABLE==table")));
+ }
+
+ @Test
+ public void testReturnToTopScreen() {
+ assertThat(filterDisplayModeScreenPresenter.returnToNextScreen(), is(topScreenView));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java
new file mode 100644
index 00000000000..cfe08e0d75d
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Function;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class InputModeScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(InputModeScreenPresenterTest.class);
+
+ private static final String TEST_INPUT_MESSAGE = "test input message";
+
+ @Mock
+ private InputModeScreenView inputModeScreenView;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ @Mock
+ private Function<String, ScreenView> resultListener;
+
+ private InputModeScreenPresenter inputModeScreenPresenter;
+
+ @Before
+ public void setup() {
+ List<String> histories = new ArrayList<>();
+ histories.add("history1");
+ histories.add("history2");
+
+ inputModeScreenPresenter = new InputModeScreenPresenter(inputModeScreenView,
+ TEST_INPUT_MESSAGE, histories, resultListener);
+ }
+
+ @Test
+ public void testInit() {
+ inputModeScreenPresenter.init();
+
+ verify(inputModeScreenView).showInput(eq(TEST_INPUT_MESSAGE), eq(""), eq(0));
+ }
+
+ @Test
+ public void testCharacter() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ }
+
+ @Test
+ public void testArrowLeftAndRight() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+ inputModeScreenPresenter.arrowLeft();
+ inputModeScreenPresenter.arrowLeft();
+ inputModeScreenPresenter.arrowLeft();
+ inputModeScreenPresenter.arrowLeft();
+ inputModeScreenPresenter.arrowRight();
+ inputModeScreenPresenter.arrowRight();
+ inputModeScreenPresenter.arrowRight();
+ inputModeScreenPresenter.arrowRight();
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(0));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ }
+
+ @Test
+ public void testHomeAndEnd() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+ inputModeScreenPresenter.home();
+ inputModeScreenPresenter.home();
+ inputModeScreenPresenter.end();
+ inputModeScreenPresenter.end();
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(0));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ }
+
+ @Test
+ public void testBackspace() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+ inputModeScreenPresenter.backspace();
+ inputModeScreenPresenter.backspace();
+ inputModeScreenPresenter.backspace();
+ inputModeScreenPresenter.backspace();
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq(""), eq(0));
+ }
+
+ @Test
+ public void testDelete() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+ inputModeScreenPresenter.delete();
+ inputModeScreenPresenter.arrowLeft();
+ inputModeScreenPresenter.delete();
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ }
+
+ @Test
+ public void testHistories() {
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
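+ // arrowUp/arrowDown step through the history entries; the cursor is expected at
+ // the end of each entry ("history1" and "history2" are both 8 characters long).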
+ inputModeScreenPresenter.arrowUp();
+ inputModeScreenPresenter.arrowUp();
+ inputModeScreenPresenter.arrowUp();
+ inputModeScreenPresenter.arrowDown();
+ inputModeScreenPresenter.arrowDown();
+ inputModeScreenPresenter.arrowDown();
+
+ InOrder inOrder = inOrder(inputModeScreenView);
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("history2"), eq(8));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("history1"), eq(8));
+ inOrder.verify(inputModeScreenView).showInput(any(), eq("history2"), eq(8));
+ }
+
+ @Test
+ public void testReturnToTopScreen() {
+ when(resultListener.apply(any())).thenReturn(topScreenView);
+
+ inputModeScreenPresenter.character('a');
+ inputModeScreenPresenter.character('b');
+ inputModeScreenPresenter.character('c');
+
+ assertThat(inputModeScreenPresenter.returnToNextScreen(), is(topScreenView));
+ verify(resultListener).apply(eq("abc"));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java
new file mode 100644
index 00000000000..836caf905db
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.verify;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class MessageModeScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(MessageModeScreenPresenterTest.class);
+
+ private static final String TEST_MESSAGE = "test message";
+
+ @Mock
+ private MessageModeScreenView messageModeScreenView;
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ private MessageModeScreenPresenter messageModeScreenPresenter;
+
+ @Before
+ public void setup() {
+ messageModeScreenPresenter = new MessageModeScreenPresenter(messageModeScreenView,
+ TEST_MESSAGE, topScreenView);
+ }
+
+ @Test
+ public void testInit() {
+ messageModeScreenPresenter.init();
+
+ verify(messageModeScreenView).showMessage(eq(TEST_MESSAGE));
+ }
+
+ @Test
+ public void testReturnToTopScreen() {
+ assertThat(messageModeScreenPresenter.returnToNextScreen(), is(topScreenView));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java
new file mode 100644
index 00000000000..cf9606b0851
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java
@@ -0,0 +1,300 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class PagingTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(PagingTest.class);
+
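+ // assertPaging() checks the triple (currentPosition, pageStartPosition,
+ // pageEndPosition) exposed by Paging after each operation.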
+ @Test
+ public void testArrowUpAndArrowDown() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(5);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 1, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 3, 1, 4);
+
+ paging.arrowDown();
+ assertPaging(paging, 4, 2, 5);
+
+ paging.arrowDown();
+ assertPaging(paging, 4, 2, 5);
+
+ paging.arrowUp();
+ assertPaging(paging, 3, 2, 5);
+
+ paging.arrowUp();
+ assertPaging(paging, 2, 2, 5);
+
+ paging.arrowUp();
+ assertPaging(paging, 1, 1, 4);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 3);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 3);
+ }
+
+ @Test
+ public void testPageUpAndPageDown() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(8);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.pageDown();
+ assertPaging(paging, 3, 3, 6);
+
+ paging.pageDown();
+ assertPaging(paging, 6, 5, 8);
+
+ paging.pageDown();
+ assertPaging(paging, 7, 5, 8);
+
+ paging.pageDown();
+ assertPaging(paging, 7, 5, 8);
+
+ paging.pageUp();
+ assertPaging(paging, 4, 4, 7);
+
+ paging.pageUp();
+ assertPaging(paging, 1, 1, 4);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 3);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 3);
+ }
+
+ @Test
+ public void testInit() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(5);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.pageDown();
+ paging.pageDown();
+ paging.pageDown();
+ paging.pageDown();
+ paging.init();
+
+ assertPaging(paging, 0, 0, 3);
+ }
+
+ @Test
+ public void testWhenPageSizeGreaterThanRecordsSize() {
+ Paging paging = new Paging();
+ paging.updatePageSize(5);
+ paging.updateRecordsSize(3);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 1, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.arrowUp();
+ assertPaging(paging, 1, 0, 3);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 3);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 3);
+
+ paging.pageDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.pageDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 3);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 3);
+ }
+
+ @Test
+ public void testWhenPageSizeIsZero() {
+ Paging paging = new Paging();
+ paging.updatePageSize(0);
+ paging.updateRecordsSize(5);
+
+ assertPaging(paging, 0, 0, 0);
+
+ paging.arrowDown();
+ assertPaging(paging, 1, 0, 0);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageDown();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 0);
+ }
+
+ @Test
+ public void testWhenRecordsSizeIsZero() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(0);
+
+ assertPaging(paging, 0, 0, 0);
+
+ paging.arrowDown();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageDown();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 0);
+ }
+
+ @Test
+ public void testWhenChangingPageSizeDynamically() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(5);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 1, 0, 3);
+
+ paging.updatePageSize(2);
+ assertPaging(paging, 1, 0, 2);
+
+ paging.arrowDown();
+ assertPaging(paging, 2, 1, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 3, 2, 4);
+
+ paging.updatePageSize(4);
+ assertPaging(paging, 3, 1, 5);
+
+ paging.updatePageSize(5);
+ assertPaging(paging, 3, 0, 5);
+
+ paging.updatePageSize(0);
+ assertPaging(paging, 3, 0, 0);
+
+ paging.arrowDown();
+ assertPaging(paging, 4, 0, 0);
+
+ paging.arrowUp();
+ assertPaging(paging, 3, 0, 0);
+
+ paging.pageDown();
+ assertPaging(paging, 3, 0, 0);
+
+ paging.pageUp();
+ assertPaging(paging, 3, 0, 0);
+
+ paging.updatePageSize(1);
+ assertPaging(paging, 3, 3, 4);
+ }
+
+ @Test
+ public void testWhenChangingRecordsSizeDynamically() {
+ Paging paging = new Paging();
+ paging.updatePageSize(3);
+ paging.updateRecordsSize(5);
+
+ assertPaging(paging, 0, 0, 3);
+
+ paging.updateRecordsSize(2);
+ assertPaging(paging, 0, 0, 2);
+ assertThat(paging.getCurrentPosition(), is(0));
+ assertThat(paging.getPageStartPosition(), is(0));
+ assertThat(paging.getPageEndPosition(), is(2));
+
+ paging.arrowDown();
+ assertPaging(paging, 1, 0, 2);
+
+ paging.updateRecordsSize(3);
+ assertPaging(paging, 1, 0, 3);
+
+ paging.arrowDown();
+ assertPaging(paging, 2, 0, 3);
+
+ paging.updateRecordsSize(1);
+ assertPaging(paging, 0, 0, 1);
+
+ paging.updateRecordsSize(0);
+ assertPaging(paging, 0, 0, 0);
+
+ paging.arrowDown();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.arrowUp();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageDown();
+ assertPaging(paging, 0, 0, 0);
+
+ paging.pageUp();
+ assertPaging(paging, 0, 0, 0);
+ }
+
+ private void assertPaging(Paging paging, int currentPosition, int pageStartPosition,
+ int pageEndPosition) {
+ assertThat(paging.getCurrentPosition(), is(currentPosition));
+ assertThat(paging.getPageStartPosition(), is(pageStartPosition));
+ assertThat(paging.getPageEndPosition(), is(pageEndPosition));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java
new file mode 100644
index 00000000000..ae09ada098e
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.RecordFilter;
+import org.apache.hadoop.hbase.hbtop.TestUtils;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.field.FieldValue;
+import org.apache.hadoop.hbase.hbtop.mode.Mode;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class TopScreenModelTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TopScreenModelTest.class);
+
+ @Mock
+ private Admin admin;
+
+ private TopScreenModel topScreenModel;
+
+ private List<Field> fields;
+
+ @Before
+ public void setup() throws IOException {
+ when(admin.getClusterMetrics()).thenReturn(TestUtils.createDummyClusterMetrics());
+ topScreenModel = new TopScreenModel(admin, Mode.REGION);
+
+ fields = Mode.REGION.getFieldInfos().stream()
+ .map(FieldInfo::getField)
+ .collect(Collectors.toList());
+ }
+
+ @Test
+ public void testSummary() {
+ topScreenModel.refreshMetricsData();
+ Summary summary = topScreenModel.getSummary();
+ TestUtils.assertSummary(summary);
+ }
+
+ @Test
+ public void testRecords() {
+ // Region Mode
+ topScreenModel.refreshMetricsData();
+ TestUtils.assertRecordsInRegionMode(topScreenModel.getRecords());
+
+ // Namespace Mode
+ topScreenModel.switchMode(Mode.NAMESPACE, null, false);
+ topScreenModel.refreshMetricsData();
+ TestUtils.assertRecordsInNamespaceMode(topScreenModel.getRecords());
+
+ // Table Mode
+ topScreenModel.switchMode(Mode.TABLE, null, false);
+ topScreenModel.refreshMetricsData();
+ TestUtils.assertRecordsInTableMode(topScreenModel.getRecords());
+
+ // RegionServer Mode
+ topScreenModel.switchMode(Mode.REGION_SERVER, null, false);
+ topScreenModel.refreshMetricsData();
+ TestUtils.assertRecordsInRegionServerMode(topScreenModel.getRecords());
+ }
+
+ @Test
+ public void testSort() {
+ // The sort key is LOCALITY
+ topScreenModel.setSortFieldAndFields(Field.LOCALITY, fields);
+
+ FieldValue previous = null;
+
+ // Test the default sort order (descending)
+ topScreenModel.refreshMetricsData();
+
+ for (Record record : topScreenModel.getRecords()) {
+ FieldValue current = record.get(Field.LOCALITY);
+ if (previous != null) {
+ assertTrue(current.compareTo(previous) < 0);
+ }
+ previous = current;
+ }
+
+ // Test after switching the sort order (ascending)
+ topScreenModel.switchSortOrder();
+ topScreenModel.refreshMetricsData();
+
+ previous = null;
+ for (Record record : topScreenModel.getRecords()) {
+ FieldValue current = record.get(Field.LOCALITY);
+ if (previous != null) {
+ assertTrue(current.compareTo(previous) > 0);
+ }
+ previous = current;
+ }
+ }
+
+ @Test
+ public void testFilters() {
+ topScreenModel.addFilter("TABLE==table1", false);
+ topScreenModel.refreshMetricsData();
+ for (Record record : topScreenModel.getRecords()) {
+ FieldValue value = record.get(Field.TABLE);
+ assertThat(value.asString(), is("table1"));
+ }
+
+ topScreenModel.clearFilters();
+ topScreenModel.addFilter("TABLE==TABLE1", false);
+ topScreenModel.refreshMetricsData();
+ assertThat(topScreenModel.getRecords().size(), is(0));
+
+ // Test with the ignore-case option enabled
+ topScreenModel.clearFilters();
+ topScreenModel.addFilter("TABLE==TABLE1", true);
+ topScreenModel.refreshMetricsData();
+ for (Record record : topScreenModel.getRecords()) {
+ FieldValue value = record.get(Field.TABLE);
+ assertThat(value.asString(), is("table1"));
+ }
+ }
+
+ @Test
+ public void testFilterHistories() {
+ topScreenModel.addFilter("TABLE==table1", false);
+ topScreenModel.addFilter("TABLE==table2", false);
+ topScreenModel.addFilter("TABLE==table3", false);
+
+ assertThat(topScreenModel.getFilterHistories().get(0), is("TABLE==table1"));
+ assertThat(topScreenModel.getFilterHistories().get(1), is("TABLE==table2"));
+ assertThat(topScreenModel.getFilterHistories().get(2), is("TABLE==table3"));
+ }
+
+ @Test
+ public void testSwitchMode() {
+ topScreenModel.switchMode(Mode.TABLE, null, false);
+ assertThat(topScreenModel.getCurrentMode(), is(Mode.TABLE));
+
+ // Test for initialFilters
+ List<RecordFilter> initialFilters = Arrays.asList(
+ RecordFilter.parse("TABLE==table1", fields, true),
+ RecordFilter.parse("TABLE==table2", fields, true));
+
+ topScreenModel.switchMode(Mode.TABLE, initialFilters, false);
+
+ assertThat(topScreenModel.getFilters().size(), is(initialFilters.size()));
+ for (int i = 0; i < topScreenModel.getFilters().size(); i++) {
+ assertThat(topScreenModel.getFilters().get(i).toString(),
+ is(initialFilters.get(i).toString()));
+ }
+
+ // Test when keepSortFieldAndSortOrderIfPossible is true
+ topScreenModel.setSortFieldAndFields(Field.NAMESPACE, fields);
+ topScreenModel.switchMode(Mode.NAMESPACE, null, true);
+ assertThat(topScreenModel.getCurrentSortField(), is(Field.NAMESPACE));
+ }
+
+ @Test
+ public void testDrillDown() {
+ topScreenModel.switchMode(Mode.TABLE, null, false);
+ topScreenModel.setSortFieldAndFields(Field.NAMESPACE, fields);
+ topScreenModel.refreshMetricsData();
+
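+ // Drilling down from the first record is expected to narrow the filters to that
+ // record's namespace and table while keeping the current sort field.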
+ boolean success = topScreenModel.drillDown(topScreenModel.getRecords().get(0));
+ assertThat(success, is(true));
+
+ assertThat(topScreenModel.getFilters().get(0).toString(), is("NAMESPACE==namespace"));
+ assertThat(topScreenModel.getFilters().get(1).toString(), is("TABLE==table3"));
+ assertThat(topScreenModel.getCurrentSortField(), is(Field.NAMESPACE));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java
new file mode 100644
index 00000000000..5f42767e6dd
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java
@@ -0,0 +1,256 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import static org.apache.hadoop.hbase.hbtop.Record.entry;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.argThat;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.hbtop.Record;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.field.FieldInfo;
+import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+
+@Category(SmallTests.class)
+@RunWith(MockitoJUnitRunner.class)
+public class TopScreenPresenterTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TopScreenPresenterTest.class);
+
+ private static final List<FieldInfo> TEST_FIELD_INFOS = Arrays.asList(
+ new FieldInfo(Field.REGION, 10, true),
+ new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true),
+ new FieldInfo(Field.LOCALITY, 10, true)
+ );
+
+ private static final List<Record> TEST_RECORDS = Arrays.asList(
+ Record.ofEntries(
+ entry(Field.REGION, "region1"),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 1L),
+ entry(Field.LOCALITY, 0.3f)),
+ Record.ofEntries(
+ entry(Field.REGION, "region2"),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 2L),
+ entry(Field.LOCALITY, 0.2f)),
+ Record.ofEntries(
+ entry(Field.REGION, "region3"),
+ entry(Field.REQUEST_COUNT_PER_SECOND, 3L),
+ entry(Field.LOCALITY, 0.1f))
+ );
+
+ private static final Summary TEST_SUMMARY = new Summary(
+ "00:00:01", "3.0.0-SNAPSHOT", "01234567-89ab-cdef-0123-456789abcdef",
+ 3, 2, 1, 6, 1, 3.0, 300);
+
+ @Mock
+ private TopScreenView topScreenView;
+
+ @Mock
+ private TopScreenModel topScreenModel;
+
+ private TopScreenPresenter topScreenPresenter;
+
+ @Before
+ public void setup() {
+ when(topScreenView.getTerminalSize()).thenReturn(new TerminalSize(100, 100));
+ when(topScreenView.getPageSize()).thenReturn(100);
+
+ when(topScreenModel.getFieldInfos()).thenReturn(TEST_FIELD_INFOS);
+ when(topScreenModel.getFields()).thenReturn(TEST_FIELD_INFOS.stream()
+ .map(FieldInfo::getField).collect(Collectors.toList()));
+ when(topScreenModel.getRecords()).thenReturn(TEST_RECORDS);
+ when(topScreenModel.getSummary()).thenReturn(TEST_SUMMARY);
+
+ topScreenPresenter = new TopScreenPresenter(topScreenView, 3000, topScreenModel);
+ }
+
+ @Test
+ public void testRefresh() {
+ topScreenPresenter.init();
+ topScreenPresenter.refresh(true);
+
+ verify(topScreenView).showTopScreen(argThat(this::assertSummary),
+ argThat(this::assertHeaders), argThat(this::assertRecords),
+ argThat(selectedRecord -> assertSelectedRecord(selectedRecord, 0)));
+ }
+
+ @Test
+ public void testVerticalScrolling() {
+ topScreenPresenter.init();
+ topScreenPresenter.refresh(true);
+
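+ // Move the selection up and down past both ends of the three test records; the
+ // selected index after each key press is verified in order below.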
+ topScreenPresenter.arrowDown();
+ topScreenPresenter.arrowDown();
+ topScreenPresenter.arrowDown();
+
+ topScreenPresenter.arrowDown();
+ topScreenPresenter.arrowDown();
+ topScreenPresenter.arrowDown();
+
+ topScreenPresenter.arrowUp();
+ topScreenPresenter.arrowUp();
+ topScreenPresenter.arrowUp();
+
+ topScreenPresenter.pageDown();
+ topScreenPresenter.pageDown();
+
+ topScreenPresenter.pageUp();
+ topScreenPresenter.pageUp();
+
+ InOrder inOrder = inOrder(topScreenView);
+ verifyVerticalScrolling(inOrder, 0);
+
+ verifyVerticalScrolling(inOrder, 1);
+ verifyVerticalScrolling(inOrder, 2);
+ verifyVerticalScrolling(inOrder, 2);
+
+ verifyVerticalScrolling(inOrder, 1);
+ verifyVerticalScrolling(inOrder, 0);
+ verifyVerticalScrolling(inOrder, 0);
+
+ verifyVerticalScrolling(inOrder, 2);
+ verifyVerticalScrolling(inOrder, 2);
+
+ verifyVerticalScrolling(inOrder, 0);
+ verifyVerticalScrolling(inOrder, 0);
+ }
+
+ private void verifyVerticalScrolling(InOrder inOrder, int expectedSelectedRecordIndex) {
+ inOrder.verify(topScreenView).showTopScreen(any(), any(), any(),
+ argThat(selectedRecord -> assertSelectedRecord(selectedRecord, expectedSelectedRecordIndex)));
+ }
+
+ @Test
+ public void testHorizontalScrolling() {
+ topScreenPresenter.init();
+ topScreenPresenter.refresh(true);
+
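+ // Scroll the columns left and right past both ends; the number of visible headers
+ // after each key press is verified in order below.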
+ topScreenPresenter.arrowRight();
+ topScreenPresenter.arrowRight();
+ topScreenPresenter.arrowRight();
+
+ topScreenPresenter.arrowLeft();
+ topScreenPresenter.arrowLeft();
+ topScreenPresenter.arrowLeft();
+
+ topScreenPresenter.end();
+ topScreenPresenter.end();
+
+ topScreenPresenter.home();
+ topScreenPresenter.home();
+
+ InOrder inOrder = inOrder(topScreenView);
+ verifyHorizontalScrolling(inOrder, 3);
+
+ verifyHorizontalScrolling(inOrder, 2);
+ verifyHorizontalScrolling(inOrder, 1);
+ verifyHorizontalScrolling(inOrder, 1);
+
+ verifyHorizontalScrolling(inOrder, 2);
+ verifyHorizontalScrolling(inOrder, 3);
+ verifyHorizontalScrolling(inOrder, 3);
+
+ verifyHorizontalScrolling(inOrder, 1);
+ verifyHorizontalScrolling(inOrder, 1);
+
+ verifyHorizontalScrolling(inOrder, 3);
+ verifyHorizontalScrolling(inOrder, 3);
+ }
+
+ private void verifyHorizontalScrolling(InOrder inOrder, int expectedHeaderCount) {
+ inOrder.verify(topScreenView).showTopScreen(any(),
+ argThat(headers -> headers.size() == expectedHeaderCount), any(), any());
+ }
+
+ private boolean assertSummary(Summary actual) {
+ return actual.getCurrentTime().equals(TEST_SUMMARY.getCurrentTime())
+ && actual.getVersion().equals(TEST_SUMMARY.getVersion())
+ && actual.getClusterId().equals(TEST_SUMMARY.getClusterId())
+ && actual.getServers() == TEST_SUMMARY.getServers()
+ && actual.getLiveServers() == TEST_SUMMARY.getLiveServers()
+ && actual.getDeadServers() == TEST_SUMMARY.getDeadServers()
+ && actual.getRegionCount() == TEST_SUMMARY.getRegionCount()
+ && actual.getRitCount() == TEST_SUMMARY.getRitCount()
+ && actual.getAverageLoad() == TEST_SUMMARY.getAverageLoad()
+ && actual.getAggregateRequestPerSecond() == TEST_SUMMARY.getAggregateRequestPerSecond();
+ }
+
+ private boolean assertHeaders(List<Header> actual) {
+ List<Header> expected =
+ TEST_FIELD_INFOS.stream().map(fi -> new Header(fi.getField(), fi.getDefaultLength()))
+ .collect(Collectors.toList());
+
+ if (actual.size() != expected.size()) {
+ return false;
+ }
+
+ for (int i = 0; i < actual.size(); i++) {
+ if (actual.get(i).getField() != expected.get(i).getField()) {
+ return false;
+ }
+ if (actual.get(i).getLength() != expected.get(i).getLength()) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ private boolean assertRecords(List<Record> actual) {
+ if (actual.size() != TEST_RECORDS.size()) {
+ return false;
+ }
+
+ for (int i = 0; i < actual.size(); i++) {
+ if (!assertRecord(actual.get(i), TEST_RECORDS.get(i))) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ private boolean assertSelectedRecord(Record actual, int expectedSelectedRecordIndex) {
+ return assertRecord(actual, TEST_RECORDS.get(expectedSelectedRecordIndex));
+ }
+
+ private boolean assertRecord(Record actual, Record expected) {
+ return actual.get(Field.REGION).equals(expected.get(Field.REGION)) && actual
+ .get(Field.REQUEST_COUNT_PER_SECOND).equals(expected.get(Field.REQUEST_COUNT_PER_SECOND))
+ && actual.get(Field.LOCALITY).equals(expected.get(Field.LOCALITY));
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/CursorTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/CursorTest.java
new file mode 100644
index 00000000000..bf3b01942ff
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/CursorTest.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hbase.hbtop.terminal.impl.TerminalImpl;
+
+
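+/**
+ * A manual test for cursor handling: run the main() method in a real terminal and
+ * watch the cursor move as the positions are updated; press any key to finish.
+ */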
+public final class CursorTest {
+
+ private CursorTest() {
+ }
+
+ public static void main(String[] args) throws Exception {
+ try (Terminal terminal = new TerminalImpl()) {
+ terminal.refresh();
+ terminal.setCursorPosition(0, 0);
+
+ terminal.getTerminalPrinter(0).print("aaa").endOfLine();
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.getTerminalPrinter(0).print("bbb").endOfLine();
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.setCursorPosition(1, 0);
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.setCursorPosition(2, 0);
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.setCursorPosition(3, 0);
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.setCursorPosition(0, 1);
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.getTerminalPrinter(1).print("ccc").endOfLine();
+ terminal.refresh();
+ TimeUnit.SECONDS.sleep(1);
+
+ terminal.getTerminalPrinter(3).print("Press any key to finish").endOfLine();
+ terminal.refresh();
+
+ while (true) {
+ KeyPress keyPress = terminal.pollKeyPress();
+ if (keyPress == null) {
+ TimeUnit.MILLISECONDS.sleep(100);
+ continue;
+ }
+ break;
+ }
+ }
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPressTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPressTest.java
new file mode 100644
index 00000000000..3c53e8db725
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/KeyPressTest.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hbase.hbtop.terminal.impl.TerminalImpl;
+
+
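+/**
+ * A manual test for key-press handling: run the main() method in a real terminal,
+ * type keys to see how each press is decoded, and press F12 to exit.
+ */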
+public final class KeyPressTest {
+
+ private KeyPressTest() {
+ }
+
+ public static void main(String[] args) throws Exception {
+ try (Terminal terminal = new TerminalImpl()) {
+ terminal.hideCursor();
+ terminal.refresh();
+
+ while (true) {
+ KeyPress keyPress = terminal.pollKeyPress();
+ if (keyPress == null) {
+ TimeUnit.MILLISECONDS.sleep(100);
+ continue;
+ }
+
+ terminal.getTerminalPrinter(0).print(keyPress.toString()).endOfLine();
+ terminal.refresh();
+
+ if (keyPress.getType() == KeyPress.Type.F12) {
+ break;
+ }
+ }
+ }
+ }
+}
diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinterTest.java
new file mode 100644
index 00000000000..2054d80bb23
--- /dev/null
+++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinterTest.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.terminal;
+
+import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hbase.hbtop.terminal.impl.TerminalImpl;
+
+
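+/**
+ * A manual test for the terminal printer: run the main() method in a real terminal
+ * to check normal, highlighted, and bold output; press any key to finish.
+ */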
+public final class TerminalPrinterTest {
+
+ private TerminalPrinterTest() {
+ }
+
+ public static void main(String[] args) throws Exception {
+ try (Terminal terminal = new TerminalImpl()) {
+ terminal.hideCursor();
+ terminal.refresh();
+
+ TerminalPrinter printer = terminal.getTerminalPrinter(0);
+ printer.print("Normal string").endOfLine();
+ printer.startHighlight().print("Highlighted string").stopHighlight().endOfLine();
+ printer.startBold().print("Bold string").stopBold().endOfLine();
+ printer.startHighlight().startBold().print("Highlighted bold string")
+ .stopBold().stopHighlight().endOfLine();
+ printer.endOfLine();
+ printer.print("Press any key to finish").endOfLine();
+
+ terminal.refresh();
+
+ while (true) {
+ KeyPress keyPress = terminal.pollKeyPress();
+ if (keyPress == null) {
+ TimeUnit.MILLISECONDS.sleep(100);
+ continue;
+ }
+ break;
+ }
+ }
+ }
+}
diff --git a/pom.xml b/pom.xml
index ff0757ffd8a..7b0d2ba77c4 100755
--- a/pom.xml
+++ b/pom.xml
@@ -89,6 +89,7 @@
     <module>hbase-metrics-api</module>
     <module>hbase-metrics</module>
     <module>hbase-zookeeper</module>
+    <module>hbase-hbtop</module>
   </modules>
   <scm>
     <connection>scm:git:git://gitbox.apache.org/repos/asf/hbase.git</connection>
@@ -1655,6 +1656,11 @@
         <type>test-jar</type>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <artifactId>hbase-hbtop</artifactId>
+        <groupId>org.apache.hbase</groupId>
+        <version>${project.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-shaded-client</artifactId>