HDFS-4373. Merging change r1502328 from trunk to branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1502423 13f79535-47bb-0310-9956-ffa450edef68
Author: Chris Nauroth
Date: 2013-07-12 04:53:17 +00:00
Parent: 5039a956ff
Commit: 9f5667fe27
4 changed files with 386 additions and 0 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -95,6 +95,8 @@ Release 2.1.0-beta - 2013-07-02
HDFS-4762 Provide HDFS based NFSv3 and Mountd implementation (brandonli)
HDFS-4372. Track NameNode startup progress. (cnauroth)
HDFS-4373. Add HTTP API for querying NameNode startup progress. (cnauroth)
IMPROVEMENTS

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

@@ -194,6 +194,8 @@ public class NameNodeHttpServer {
}
private static void setupServlets(HttpServer httpServer, Configuration conf) {
httpServer.addInternalServlet("startupProgress",
StartupProgressServlet.PATH_SPEC, StartupProgressServlet.class);
httpServer.addInternalServlet("getDelegationToken",
GetDelegationTokenServlet.PATH_SPEC,
GetDelegationTokenServlet.class, true);
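
Once this servlet is registered, the startup progress JSON can be fetched from the NameNode's HTTP server at the /startupProgress path. The following is a minimal client sketch, not part of this commit; the host name is hypothetical and 50070 is assumed as the era's default NameNode HTTP port.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class StartupProgressClient {
  public static void main(String[] args) throws Exception {
    // Hypothetical NameNode HTTP address; substitute the real host:port.
    URL url = new URL("http://namenode.example.com:50070/startupProgress");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    StringBuilder body = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = reader.readLine()) != null) {
        body.append(line);
      }
    }
    // The response is a JSON document with elapsedTime, percentComplete,
    // and a phases array, as produced by StartupProgressServlet below.
    System.out.println(body);
  }
}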

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java

@@ -0,0 +1,135 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressView;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
import org.apache.hadoop.io.IOUtils;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Servlet that provides a JSON representation of the namenode's current startup
* progress.
*/
@InterfaceAudience.Private
@SuppressWarnings("serial")
public class StartupProgressServlet extends DfsServlet {
private static final String COUNT = "count";
private static final String ELAPSED_TIME = "elapsedTime";
private static final String FILE = "file";
private static final String NAME = "name";
private static final String PERCENT_COMPLETE = "percentComplete";
private static final String PHASES = "phases";
private static final String SIZE = "size";
private static final String STATUS = "status";
private static final String STEPS = "steps";
private static final String TOTAL = "total";
public static final String PATH_SPEC = "/startupProgress";
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws IOException {
resp.setContentType("application/json; charset=UTF-8");
StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
getServletContext());
StartupProgressView view = prog.createView();
JsonGenerator json = new JsonFactory().createJsonGenerator(resp.getWriter());
try {
json.writeStartObject();
json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());
json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete());
json.writeArrayFieldStart(PHASES);
for (Phase phase: view.getPhases()) {
json.writeStartObject();
json.writeStringField(NAME, phase.getName());
json.writeStringField(STATUS, view.getStatus(phase).toString());
json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase));
json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase));
writeStringFieldIfNotNull(json, FILE, view.getFile(phase));
writeNumberFieldIfDefined(json, SIZE, view.getSize(phase));
json.writeArrayFieldStart(STEPS);
for (Step step: view.getSteps(phase)) {
json.writeStartObject();
StepType type = step.getType();
String name = type != null ? type.getName() : null;
writeStringFieldIfNotNull(json, NAME, name);
json.writeNumberField(COUNT, view.getCount(phase, step));
writeStringFieldIfNotNull(json, FILE, step.getFile());
writeNumberFieldIfDefined(json, SIZE, step.getSize());
json.writeNumberField(TOTAL, view.getTotal(phase, step));
json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase,
step));
json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase, step));
json.writeEndObject();
}
json.writeEndArray();
json.writeEndObject();
}
json.writeEndArray();
json.writeEndObject();
} finally {
IOUtils.cleanup(LOG, json);
}
}
/**
* Writes a JSON number field only if the value is defined.
*
* @param json JsonGenerator to receive output
* @param key String key to put
* @param value long value to put
* @throws IOException if there is an I/O error
*/
private static void writeNumberFieldIfDefined(JsonGenerator json, String key,
long value) throws IOException {
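// Long.MIN_VALUE is treated as "undefined" here; the field is omitted
// rather than emitting a meaningless number.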
if (value != Long.MIN_VALUE) {
json.writeNumberField(key, value);
}
}
/**
* Writes a JSON string field only if the value is non-null.
*
* @param json JsonGenerator to receive output
* @param key String key to put
* @param value String value to put
* @throws IOException if there is an I/O error
*/
private static void writeStringFieldIfNotNull(JsonGenerator json, String key,
String value) throws IOException {
if (value != null) {
json.writeStringField(key, value);
}
}
}
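
For callers that consume the servlet's output programmatically, the response can be parsed with the same Jackson 1.x library the servlet itself uses. A minimal sketch, assuming the body has already been read into a String; StartupProgressParser and the printed summary are illustrative and not part of this commit. The field names match the constants defined in the servlet above.

import java.util.List;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;

public class StartupProgressParser {
  @SuppressWarnings("unchecked")
  public static void printSummary(String responseBody) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Map<String, Object> root = mapper.readValue(responseBody, Map.class);
    System.out.println("overall percentComplete = " + root.get("percentComplete"));
    List<Map<String, Object>> phases =
        (List<Map<String, Object>>) root.get("phases");
    for (Map<String, Object> phase : phases) {
      // Each phase object carries name, status, percentComplete, elapsedTime,
      // an optional file/size, and a steps array.
      System.out.println(phase.get("name") + ": " + phase.get("status"));
    }
  }
}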

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java

@@ -0,0 +1,247 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressTestHelper.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
import org.junit.Before;
import org.junit.Test;
import org.mortbay.util.ajax.JSON;
public class TestStartupProgressServlet {
private HttpServletRequest req;
private HttpServletResponse resp;
private ByteArrayOutputStream respOut;
private StartupProgress startupProgress;
private StartupProgressServlet servlet;
@Before
public void setUp() throws Exception {
startupProgress = new StartupProgress();
ServletContext context = mock(ServletContext.class);
when(context.getAttribute(NameNodeHttpServer.STARTUP_PROGRESS_ATTRIBUTE_KEY))
.thenReturn(startupProgress);
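// Partial mock: stub getServletContext() to return the mock context, but
// call the real doGet so the servlet's JSON output is actually produced.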
servlet = mock(StartupProgressServlet.class);
when(servlet.getServletContext()).thenReturn(context);
doCallRealMethod().when(servlet).doGet(any(HttpServletRequest.class),
any(HttpServletResponse.class));
req = mock(HttpServletRequest.class);
respOut = new ByteArrayOutputStream();
PrintWriter writer = new PrintWriter(respOut);
resp = mock(HttpServletResponse.class);
when(resp.getWriter()).thenReturn(writer);
}
@Test
public void testInitialState() throws Exception {
String respBody = doGetAndReturnResponseBody();
assertNotNull(respBody);
Map<String, Object> expected = ImmutableMap.<String, Object>builder()
.put("percentComplete", 0.0f)
.put("phases", Arrays.<Object>asList(
ImmutableMap.<String, Object>builder()
.put("name", "LoadingFsImage")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "LoadingEdits")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SavingCheckpoint")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SafeMode")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build()))
.build();
assertEquals(JSON.toString(expected), filterJson(respBody));
}
@Test
public void testRunningState() throws Exception {
setStartupProgressForRunningState(startupProgress);
String respBody = doGetAndReturnResponseBody();
assertNotNull(respBody);
Map<String, Object> expected = ImmutableMap.<String, Object>builder()
.put("percentComplete", 0.375f)
.put("phases", Arrays.<Object>asList(
ImmutableMap.<String, Object>builder()
.put("name", "LoadingFsImage")
.put("status", "COMPLETE")
.put("percentComplete", 1.0f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("name", "Inodes")
.put("count", 100L)
.put("total", 100L)
.put("percentComplete", 1.0f)
.build()
))
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "LoadingEdits")
.put("status", "RUNNING")
.put("percentComplete", 0.5f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("count", 100L)
.put("file", "file")
.put("size", 1000L)
.put("total", 200L)
.put("percentComplete", 0.5f)
.build()
))
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SavingCheckpoint")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SafeMode")
.put("status", "PENDING")
.put("percentComplete", 0.0f)
.put("steps", Collections.emptyList())
.build()))
.build();
assertEquals(JSON.toString(expected), filterJson(respBody));
}
@Test
public void testFinalState() throws Exception {
setStartupProgressForFinalState(startupProgress);
String respBody = doGetAndReturnResponseBody();
assertNotNull(respBody);
Map<String, Object> expected = ImmutableMap.<String, Object>builder()
.put("percentComplete", 1.0f)
.put("phases", Arrays.<Object>asList(
ImmutableMap.<String, Object>builder()
.put("name", "LoadingFsImage")
.put("status", "COMPLETE")
.put("percentComplete", 1.0f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("name", "Inodes")
.put("count", 100L)
.put("total", 100L)
.put("percentComplete", 1.0f)
.build()
))
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "LoadingEdits")
.put("status", "COMPLETE")
.put("percentComplete", 1.0f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("count", 200L)
.put("file", "file")
.put("size", 1000L)
.put("total", 200L)
.put("percentComplete", 1.0f)
.build()
))
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SavingCheckpoint")
.put("status", "COMPLETE")
.put("percentComplete", 1.0f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("name", "Inodes")
.put("count", 300L)
.put("total", 300L)
.put("percentComplete", 1.0f)
.build()
))
.build(),
ImmutableMap.<String, Object>builder()
.put("name", "SafeMode")
.put("status", "COMPLETE")
.put("percentComplete", 1.0f)
.put("steps", Collections.<Object>singletonList(
ImmutableMap.<String, Object>builder()
.put("name", "AwaitingReportedBlocks")
.put("count", 400L)
.put("total", 400L)
.put("percentComplete", 1.0f)
.build()
))
.build()))
.build();
assertEquals(JSON.toString(expected), filterJson(respBody));
}
/**
* Calls doGet on the servlet, captures the response body as a string, and
* returns it to the caller.
*
* @return String response body
* @throws IOException thrown if there is an I/O error
*/
private String doGetAndReturnResponseBody() throws IOException {
servlet.doGet(req, resp);
return new String(respOut.toByteArray(), "UTF-8");
}
/**
* Filters the given JSON response body, removing elements that would impede
* testing. Specifically, it removes elapsedTime fields, because we cannot
* predict the exact values.
*
* @param str String to filter
* @return String filtered value
*/
private String filterJson(String str) {
return str.replaceAll("\"elapsedTime\":\\d+\\,", "")
.replaceAll("\\,\"elapsedTime\":\\d+", "");
}
}