YARN-1780. Improved logging in the Timeline client and server. Contributed by Zhijie Shen.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1575141 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2014-03-07 03:52:53 +00:00
parent 173c115951
commit f4eb6ddec9
4 changed files with 59 additions and 8 deletions

hadoop-yarn-project/CHANGES.txt

@@ -265,6 +265,9 @@ Release 2.4.0 - UNRELEASED
YARN-1761. Modified RMAdmin CLI to check whether HA is enabled or not before
it executes any of the HA admin related commands. (Xuan Gong via vinodkv)
YARN-1780. Improved logging in the Timeline client and server. (Zhijie Shen
via vinodkv)
OPTIMIZATIONS
BUG FIXES

TimelineClientImpl.java

@@ -75,6 +75,7 @@ public class TimelineClientImpl extends TimelineClient {
YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS), RESOURCE_URI_STR));
}
LOG.info("Timeline service address: " + resURI);
super.serviceInit(conf);
}
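The address logged above is read from YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, falling back to the default when the key is unset. A minimal sketch of pointing a client at a specific timeline server, assuming the standard configuration key (the host and port below are illustrative, not part of this patch):

import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineAddressSketch {
  public static void main(String[] args) {
    YarnConfiguration conf = new YarnConfiguration();
    // Illustrative host:port; when unset the client falls back to
    // YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS.
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        "timeline.example.com:8188");
    // A TimelineClientImpl initialized with this conf logs the resolved
    // address at INFO during serviceInit(), as added in the hunk above.
    System.out.println(conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS));
  }
}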
@@ -83,12 +84,22 @@ public class TimelineClientImpl extends TimelineClient {
TimelineEntity... entities) throws IOException, YarnException {
TimelineEntities entitiesContainer = new TimelineEntities();
entitiesContainer.addEntities(Arrays.asList(entities));
ClientResponse resp = doPostingEntities(entitiesContainer);
if (resp.getClientResponseStatus() != ClientResponse.Status.OK) {
ClientResponse resp;
try {
resp = doPostingEntities(entitiesContainer);
} catch (RuntimeException re) {
// runtime exception is expected if the client cannot connect to the server
String msg =
"Failed to get the response from the timeline server.";
LOG.error(msg, re);
throw re;
}
if (resp == null ||
resp.getClientResponseStatus() != ClientResponse.Status.OK) {
String msg =
"Failed to get the response from the timeline server.";
LOG.error(msg);
if (LOG.isDebugEnabled()) {
if (LOG.isDebugEnabled() && resp != null) {
String output = resp.getEntity(String.class);
LOG.debug("HTTP error code: " + resp.getStatus()
+ " Server response : \n" + output);

TestTimelineClient.java

@@ -20,9 +20,13 @@ package org.apache.hadoop.yarn.client.api.impl;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.net.ConnectException;
import junit.framework.Assert;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
@@ -36,6 +40,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientResponse;
public class TestTimelineClient {
@@ -56,7 +61,7 @@ public class TestTimelineClient {
@Test
public void testPostEntities() throws Exception {
mockClientResponse(ClientResponse.Status.OK, false);
mockClientResponse(ClientResponse.Status.OK, false, false);
try {
TimelinePutResponse response = client.putEntities(generateEntity());
Assert.assertEquals(0, response.getErrors().size());
@@ -67,7 +72,7 @@ public class TestTimelineClient {
@Test
public void testPostEntitiesWithError() throws Exception {
mockClientResponse(ClientResponse.Status.OK, true);
mockClientResponse(ClientResponse.Status.OK, true, false);
try {
TimelinePutResponse response = client.putEntities(generateEntity());
Assert.assertEquals(1, response.getErrors().size());
@@ -84,7 +89,8 @@ public class TestTimelineClient {
@Test
public void testPostEntitiesNoResponse() throws Exception {
mockClientResponse(ClientResponse.Status.INTERNAL_SERVER_ERROR, false);
mockClientResponse(
ClientResponse.Status.INTERNAL_SERVER_ERROR, false, false);
try {
client.putEntities(generateEntity());
Assert.fail("Exception is expected");
@@ -94,9 +100,25 @@ public class TestTimelineClient {
}
}
@Test
public void testPostEntitiesConnectionRefused() throws Exception {
mockClientResponse(null, false, true);
try {
client.putEntities(generateEntity());
Assert.fail("RuntimeException is expected");
} catch (RuntimeException re) {
Assert.assertTrue(re instanceof ClientHandlerException);
}
}
private ClientResponse mockClientResponse(ClientResponse.Status status,
boolean hasError) {
boolean hasError, boolean hasRuntimeError) {
ClientResponse response = mock(ClientResponse.class);
if (hasRuntimeError) {
doThrow(new ClientHandlerException(new ConnectException())).when(client)
.doPostingEntities(any(TimelineEntities.class));
return response;
}
doReturn(response).when(client)
.doPostingEntities(any(TimelineEntities.class));
when(response.getClientResponseStatus()).thenReturn(status);
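For context on the stubbing above: doThrow(...).when(client) only works because the test's client field is a Mockito spy wrapping a real TimelineClientImpl. A rough sketch of such a fixture (illustrative; the actual setUp is outside this hunk and may differ):

// Illustrative fixture inside TestTimelineClient.
private TimelineClientImpl client;

@Before
public void setup() {
  // spy() lets Mockito intercept doPostingEntities() while keeping the rest real.
  client = spy((TimelineClientImpl) TimelineClient.createTimelineClient());
  client.init(new YarnConfiguration());
  client.start();
}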

TimelineWebServices.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.CSV_JOINER;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@@ -54,10 +56,12 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import com.google.inject.Inject;
@@ -245,6 +249,17 @@ public class TimelineWebServices {
return new TimelinePutResponse();
}
try {
List<EntityIdentifier> entityIDs = new ArrayList<EntityIdentifier>();
for (TimelineEntity entity : entities.getEntities()) {
EntityIdentifier entityID =
new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
entityIDs.add(entityID);
if (LOG.isDebugEnabled()) {
LOG.debug("Storing the entity " + entityID + ", JSON-style content: "
+ TimelineUtils.dumpTimelineRecordtoJSON(entity));
}
}
LOG.info("Storing entities: " + CSV_JOINER.join(entityIDs));
return store.put(entities);
} catch (IOException e) {
LOG.error("Error putting entities", e);
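The server-side addition follows the common guarded-logging idiom: entity identifiers are joined cheaply for the INFO line, while the per-entity JSON dump is only produced when DEBUG is enabled, so the serialization cost is avoided in normal operation. A minimal standalone sketch of that idiom (class and method names are illustrative, not part of this patch):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class GuardedLoggingSketch {
  private static final Log LOG = LogFactory.getLog(GuardedLoggingSketch.class);

  static void store(String entityId, Object entity) {
    if (LOG.isDebugEnabled()) {
      // Only pay for building the verbose message when DEBUG output is on.
      LOG.debug("Storing the entity " + entityId + ", content: " + entity);
    }
    LOG.info("Storing entity: " + entityId);
  }

  public static void main(String[] args) {
    store("entity_1", "{\"events\":[]}");
  }
}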