HBASE-26192 Master UI hbck should provide a JSON formatted output option
parent 33ea96cdf7
commit d34c21dac6
HMaster.java
@@ -137,6 +137,7 @@ import org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
 import org.apache.hadoop.hbase.master.cleaner.SnapshotCleanerChore;
 import org.apache.hadoop.hbase.master.hbck.HbckChore;
 import org.apache.hadoop.hbase.master.http.MasterDumpServlet;
+import org.apache.hadoop.hbase.master.http.MasterHbckServlet;
 import org.apache.hadoop.hbase.master.http.MasterRedirectServlet;
 import org.apache.hadoop.hbase.master.http.MasterStatusServlet;
 import org.apache.hadoop.hbase.master.http.api_v1.ResourceConfigFactory;
@@ -724,6 +725,7 @@ public class HMaster extends HBaseServerBase<MasterRpcServices> implements Maste
   protected void configureInfoServer(InfoServer infoServer) {
     infoServer.addUnprivilegedServlet("master-status", "/master-status", MasterStatusServlet.class);
     infoServer.addUnprivilegedServlet("api_v1", "/api/v1/*", buildApiV1Servlet());
+    infoServer.addUnprivilegedServlet("hbck", "/hbck", MasterHbckServlet.class);

     infoServer.setAttribute(MASTER, this);
   }
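With the servlet registered on the master info server, the JSON report is one HTTP GET away. A minimal client sketch, assuming Java 11+ java.net.http and a locally running master on the default info port 16010 (host, port, and class name here are illustrative, not part of this change):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class HbckReportFetcher {
  public static void main(String[] args) throws Exception {
    // cache=true serves the last stored reports; omitting it (or cache=false)
    // asks the master to run the hbck chore and catalog janitor first.
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder()
      .uri(URI.create("http://localhost:16010/hbck?cache=true"))
      .GET()
      .build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode()); // 200, or 503 while the master initializes
    System.out.println(response.body());       // the JSON report
  }
}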
MasterHbckServlet.java (new file)
@@ -0,0 +1,144 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.http;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.hbck.HbckChore;
import org.apache.hadoop.hbase.master.http.gson.GsonFactory;
import org.apache.hadoop.hbase.master.janitor.CatalogJanitor;
import org.apache.hadoop.hbase.master.janitor.Report;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.gson.Gson;

/**
 * Renders the most recent hbck chore and catalog janitor reports as JSON.
 */
@InterfaceAudience.Private
public class MasterHbckServlet extends HttpServlet {

  public static final String START_TIMESTAMP = "start_timestamp";
  public static final String END_TIMESTAMP = "end_timestamp";
  public static final String INCONSISTENT_REGIONS = "inconsistent_regions";
  public static final String ORPHAN_REGIONS_ON_RS = "orphan_regions_on_rs";
  public static final String ORPHAN_REGIONS_ON_FS = "orphan_regions_on_fs";
  public static final String HOLES = "holes";
  public static final String OVERLAPS = "overlaps";
  public static final String UNKNOWN_SERVERS = "unknown_servers";
  public static final String EMPTY_REGIONINFO = "empty_regioninfo";

  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(MasterHbckServlet.class);
  private static final Gson GSON = GsonFactory.buildGson();

  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
    final HMaster master = (HMaster) getServletContext().getAttribute(HMaster.MASTER);
    if (!master.isInitialized()) {
      LOG.warn("Master is not initialized yet");
      sendError(response, HttpServletResponse.SC_SERVICE_UNAVAILABLE,
        "master is not initialized yet");
      return;
    }
    final HbckChore hbckChore = master.getHbckChore();
    if (hbckChore == null || hbckChore.isDisabled()) {
      LOG.warn("Hbck chore is disabled");
      sendError(response, HttpServletResponse.SC_SERVICE_UNAVAILABLE, "Hbck chore is disabled");
      return;
    }
    // Unless the request carries cache=true, run the hbck chore and catalog janitor
    // now so the report is fresh; fall back to the cached reports on failure.
    if (!Boolean.parseBoolean(request.getParameter("cache"))) {
      try {
        master.getMasterRpcServices().runHbckChore(null, null);
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException se) {
        LOG.warn("Failed generating a new hbck chore report; using cache", se);
      }
      try {
        master.getMasterRpcServices().runCatalogScan(null, null);
      } catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException se) {
        LOG.warn("Failed generating a new catalogjanitor report; using cache", se);
      }
    }
    Map<String, Object> result = new HashMap<>();
    result.put(START_TIMESTAMP, hbckChore.getCheckingStartTimestamp());
    result.put(END_TIMESTAMP, hbckChore.getCheckingEndTimestamp());
    final Map<String, Pair<ServerName, List<ServerName>>> inconsistentRegions =
      hbckChore.getInconsistentRegions();
    if (inconsistentRegions != null && !inconsistentRegions.isEmpty()) {
      result.put(INCONSISTENT_REGIONS, inconsistentRegions);
    }
    final Map<String, ServerName> orphanRegionsOnRS = hbckChore.getOrphanRegionsOnRS();
    if (orphanRegionsOnRS != null && !orphanRegionsOnRS.isEmpty()) {
      result.put(ORPHAN_REGIONS_ON_RS, orphanRegionsOnRS);
    }
    final Map<String, Path> orphanRegionsOnFS = hbckChore.getOrphanRegionsOnFS();
    if (orphanRegionsOnFS != null && !orphanRegionsOnFS.isEmpty()) {
      result.put(ORPHAN_REGIONS_ON_FS, orphanRegionsOnFS);
    }
    final CatalogJanitor janitor = master.getCatalogJanitor();
    if (janitor != null) {
      final Report report = janitor.getLastReport();
      if (report != null && !report.isEmpty()) {
        List<Pair<RegionInfo, RegionInfo>> holes = report.getHoles();
        if (holes != null && !holes.isEmpty()) {
          result.put(HOLES, holes);
        }
        List<Pair<RegionInfo, RegionInfo>> overlaps = report.getOverlaps();
        if (overlaps != null && !overlaps.isEmpty()) {
          result.put(OVERLAPS, overlaps);
        }
        List<Pair<RegionInfo, ServerName>> unknownServers = report.getUnknownServers();
        if (unknownServers != null && !unknownServers.isEmpty()) {
          result.put(UNKNOWN_SERVERS, unknownServers);
        }
        List<byte[]> emptyRegionInfo = report.getEmptyRegionInfo();
        if (!emptyRegionInfo.isEmpty()) {
          result.put(EMPTY_REGIONINFO, emptyRegionInfo);
        }
      }
    }
    response.setContentType("application/json");
    PrintWriter out = response.getWriter();
    out.write(GSON.toJson(result));
    out.write('\n');
  }

  private static void sendError(HttpServletResponse response, int code, String message)
    throws IOException {
    response.setContentType("application/json");
    Map<String, Object> result = new HashMap<>();
    result.put("error", message);
    response.setStatus(code);
    PrintWriter out = response.getWriter();
    out.write(GSON.toJson(result));
    out.write('\n');
  }

}
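Because the servlet serializes a plain Map through Gson, any JSON client can read the output back. A sketch of recovering the two timestamp fields, using stock com.google.gson rather than the shaded org.apache.hbase.thirdparty copy, with a hard-coded sample body standing in for a real response (both substitutions are assumptions for illustration):

import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class HbckReportParser {
  public static void main(String[] args) {
    // A body shaped like the servlet's output; real bodies also carry keys
    // such as "inconsistent_regions" or "holes" when problems are found.
    String responseBody = "{\"start_timestamp\":1630000000000,\"end_timestamp\":1630000001000}";
    Map<String, Object> report = new Gson().fromJson(responseBody,
      new TypeToken<Map<String, Object>>() {}.getType());
    // Gson parses JSON numbers into Doubles for Map<String, Object> targets,
    // which is why the test below converts via Double.longValue().
    long start = ((Double) report.get("start_timestamp")).longValue();
    long end = ((Double) report.get("end_timestamp")).longValue();
    System.out.println("hbck chore ran for " + (end - start) + " ms");
  }
}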
TestMasterHbckServlet.java (new file)
@@ -0,0 +1,240 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.http;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterRpcServices;
import org.apache.hadoop.hbase.master.hbck.HbckChore;
import org.apache.hadoop.hbase.master.http.gson.GsonFactory;
import org.apache.hadoop.hbase.master.janitor.CatalogJanitor;
import org.apache.hadoop.hbase.master.janitor.Report;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.hbase.thirdparty.com.google.gson.reflect.TypeToken;

/**
 * Tests for the master hbck servlet.
 */
@Category({ MasterTests.class, MediumTests.class })
public class TestMasterHbckServlet {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestMasterHbckServlet.class);

  static final ServerName FAKE_HOST = ServerName.valueOf("fakehost", 12345, 1234567890);
  static final ServerName FAKE_HOST_2 = ServerName.valueOf("fakehost2", 12345, 1234567890);
  static final TableDescriptor FAKE_TABLE =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("mytable")).build();
  static final RegionInfo FAKE_HRI = RegionInfoBuilder.newBuilder(FAKE_TABLE.getTableName())
    .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("b")).build();
  static final RegionInfo FAKE_HRI_2 = RegionInfoBuilder.newBuilder(FAKE_TABLE.getTableName())
    .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("c")).build();
  static final RegionInfo FAKE_HRI_3 = RegionInfoBuilder.newBuilder(FAKE_TABLE.getTableName())
    .setStartKey(Bytes.toBytes("d")).setEndKey(Bytes.toBytes("e")).build();
  static final Path FAKE_PATH = new Path(
    "/hbase/data/default/" + FAKE_TABLE.getTableName() + "/" + FAKE_HRI_3.getEncodedName());
  static final long FAKE_START_TIMESTAMP = System.currentTimeMillis();
  static final long FAKE_END_TIMESTAMP = System.currentTimeMillis() + 1000;
  static final Gson GSON = GsonFactory.buildGson();

  private HMaster master;

  @Before
  public void setupMocks() {
    // Fake inconsistentRegions: one region deployed somewhere other than its assigned host
    Map<String, Pair<ServerName, List<ServerName>>> inconsistentRegions = new HashMap<>();
    inconsistentRegions.put(FAKE_HRI.getEncodedName(),
      new Pair<>(FAKE_HOST, Arrays.asList(FAKE_HOST_2)));

    // Fake orphanRegionsOnRS: deliberately left empty
    Map<String, ServerName> orphanRegionsOnRS = new HashMap<>();

    // Fake orphanRegionsOnFS
    Map<String, Path> orphanRegionsOnFS = new HashMap<>();
    orphanRegionsOnFS.put(FAKE_HRI_3.getEncodedName(), FAKE_PATH);

    // Fake HbckChore
    HbckChore hbckChore = mock(HbckChore.class);
    doReturn(FAKE_START_TIMESTAMP).when(hbckChore).getCheckingStartTimestamp();
    doReturn(FAKE_END_TIMESTAMP).when(hbckChore).getCheckingEndTimestamp();
    doReturn(inconsistentRegions).when(hbckChore).getInconsistentRegions();
    doReturn(orphanRegionsOnRS).when(hbckChore).getOrphanRegionsOnRS();
    doReturn(orphanRegionsOnFS).when(hbckChore).getOrphanRegionsOnFS();

    // Fake region holes: deliberately left empty
    List<Pair<RegionInfo, RegionInfo>> holes = new ArrayList<>();

    // Fake region overlaps
    List<Pair<RegionInfo, RegionInfo>> overlaps = new ArrayList<>();
    overlaps.add(new Pair<>(FAKE_HRI, FAKE_HRI_2));

    // Fake unknown servers
    List<Pair<RegionInfo, ServerName>> unknownServers = new ArrayList<>();
    unknownServers.add(new Pair<>(FAKE_HRI, FAKE_HOST_2));

    // Fake empty region info
    List<String> emptyRegionInfo = new ArrayList<>();
    emptyRegionInfo.add(FAKE_HRI_3.getEncodedName());

    // Fake catalog janitor report
    Report report = mock(Report.class);
    doReturn(FAKE_START_TIMESTAMP).when(report).getCreateTime();
    doReturn(holes).when(report).getHoles();
    doReturn(overlaps).when(report).getOverlaps();
    doReturn(unknownServers).when(report).getUnknownServers();
    doReturn(emptyRegionInfo).when(report).getEmptyRegionInfo();

    // Fake CatalogJanitor
    CatalogJanitor janitor = mock(CatalogJanitor.class);
    doReturn(report).when(janitor).getLastReport();

    // Fake master
    master = mock(HMaster.class);
    doReturn(HBaseConfiguration.create()).when(master).getConfiguration();
    doReturn(true).when(master).isInitialized();
    doReturn(Optional.of(FAKE_HOST)).when(master).getActiveMaster();
    doReturn(janitor).when(master).getCatalogJanitor();
    doReturn(hbckChore).when(master).getHbckChore();
    MasterRpcServices services = mock(MasterRpcServices.class);
    doReturn(services).when(master).getRpcServices();
    doReturn(services).when(master).getMasterRpcServices();
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testHbckServletWithMocks() throws Exception {
    // Set up request and response mocks
    HttpServletRequest request = mock(HttpServletRequest.class);
    HttpServletResponse response = mock(HttpServletResponse.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    PrintWriter writer = new PrintWriter(out);
    doReturn(writer).when(response).getWriter();

    // Set up a servlet context whose attributes are backed by a plain map
    ServletContext context = mock(ServletContext.class);
    Map<String, Object> configMap = new HashMap<>();
    doAnswer(new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) throws Throwable {
        return configMap.get(invocation.getArgument(0));
      }
    }).when(context).getAttribute(anyString());
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        configMap.put(invocation.getArgument(0), invocation.getArgument(1));
        return null;
      }
    }).when(context).setAttribute(anyString(), any());
    ServletConfig config = mock(ServletConfig.class);
    doReturn(context).when(config).getServletContext();

    // Instantiate the servlet and call doGet
    MasterHbckServlet servlet = new MasterHbckServlet();
    servlet.init(config);
    servlet.getServletContext().setAttribute(HMaster.MASTER, master);
    servlet.doGet(request, response);
    // Flush the writer
    writer.flush();

    // The response should now be in 'out'
    String responseString = new String(out.toByteArray(), StandardCharsets.UTF_8);
    Map<String, Object> result =
      GSON.fromJson(responseString, new TypeToken<Map<String, Object>>() {
      }.getType());

    // Check that the result is as expected
    long startTimestamp = ((Double) result.get(MasterHbckServlet.START_TIMESTAMP)).longValue();
    assertEquals(FAKE_START_TIMESTAMP, startTimestamp);
    long endTimestamp = ((Double) result.get(MasterHbckServlet.END_TIMESTAMP)).longValue();
    assertEquals(FAKE_END_TIMESTAMP, endTimestamp);
    Map<String, Pair<ServerName, List<ServerName>>> inconsistentRegions = (Map<String,
      Pair<ServerName, List<ServerName>>>) result.get(MasterHbckServlet.INCONSISTENT_REGIONS);
    assertNotNull(inconsistentRegions);
    assertEquals(1, inconsistentRegions.size());
    assertNotNull(inconsistentRegions.get(FAKE_HRI.getEncodedName()));
    assertNull(inconsistentRegions.get(FAKE_HRI_3.getEncodedName()));
    Map<String, ServerName> orphanRegionsOnRS =
      (Map<String, ServerName>) result.get(MasterHbckServlet.ORPHAN_REGIONS_ON_RS);
    assertNull(orphanRegionsOnRS);
    Map<String, Path> orphanRegionsOnFS =
      (Map<String, Path>) result.get(MasterHbckServlet.ORPHAN_REGIONS_ON_FS);
    assertNotNull(orphanRegionsOnFS);
    assertEquals(1, orphanRegionsOnFS.size());
    assertNull(orphanRegionsOnFS.get(FAKE_HRI.getEncodedName()));
    assertNotNull(orphanRegionsOnFS.get(FAKE_HRI_3.getEncodedName()));
    List<Pair<RegionInfo, RegionInfo>> holes =
      (List<Pair<RegionInfo, RegionInfo>>) result.get(MasterHbckServlet.HOLES);
    assertNull(holes);
    List<Pair<RegionInfo, RegionInfo>> overlaps =
      (List<Pair<RegionInfo, RegionInfo>>) result.get(MasterHbckServlet.OVERLAPS);
    assertNotNull(overlaps);
    assertEquals(1, overlaps.size());
    List<Pair<RegionInfo, ServerName>> unknownServers =
      (List<Pair<RegionInfo, ServerName>>) result.get(MasterHbckServlet.UNKNOWN_SERVERS);
    assertNotNull(unknownServers);
    assertEquals(1, unknownServers.size());
    List<byte[]> emptyRegionInfo = (List<byte[]>) result.get(MasterHbckServlet.EMPTY_REGIONINFO);
    assertNotNull(emptyRegionInfo);
    assertEquals(1, emptyRegionInfo.size());
  }

}