YARN-5251. Yarn CLI to obtain App logs for last 'n' bytes fails. Contributed by Xuan Gong.

Junping Du 2016-06-17 08:24:24 -07:00
parent 09e82acaf9
commit c35fa4a0e5
6 changed files with 27 additions and 25 deletions

TestLogsCLI.java

@@ -400,7 +400,7 @@ public class TestLogsCLI {
     String logMessage = "Hello container_0_0001_01_000003 in stdout!";
     int fileContentSize = logMessage.getBytes().length;
-    int tailContentSize = "End of LogType:syslog\n\n".getBytes().length;
+    int tailContentSize = "\nEnd of LogType:syslog\n\n".getBytes().length;
     // specify how many bytes we should get from logs
     // specify a position number, it would get the first n bytes from

AggregatedLogFormat.java

@@ -787,20 +787,19 @@ public class AggregatedLogFormat {
     long toSkip = 0;
     long totalBytesToRead = fileLength;
+    long skipAfterRead = 0;
     if (bytes < 0) {
       long absBytes = Math.abs(bytes);
       if (absBytes < fileLength) {
         toSkip = fileLength - absBytes;
         totalBytesToRead = absBytes;
       }
-      long skippedBytes = valueStream.skip(toSkip);
-      if (skippedBytes != toSkip) {
-        throw new IOException("The bytes were skipped are "
-            + "different from the caller requested");
-      }
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, toSkip);
     } else {
       if (bytes < fileLength) {
         totalBytesToRead = bytes;
+        skipAfterRead = fileLength - bytes;
       }
     }
@@ -818,7 +817,9 @@ public class AggregatedLogFormat {
           pendingRead > buf.length ? buf.length : (int) pendingRead;
       len = valueStream.read(buf, 0, toRead);
     }
-    out.println("End of LogType:" + fileType);
+    org.apache.hadoop.io.IOUtils.skipFully(
+        valueStream, skipAfterRead);
+    out.println("\nEnd of LogType:" + fileType);
     out.println("");
   }
@@ -913,20 +914,19 @@ public class AggregatedLogFormat {
     long toSkip = 0;
     long totalBytesToRead = fileLength;
+    long skipAfterRead = 0;
     if (bytes < 0) {
       long absBytes = Math.abs(bytes);
       if (absBytes < fileLength) {
         toSkip = fileLength - absBytes;
         totalBytesToRead = absBytes;
       }
-      long skippedBytes = valueStream.skip(toSkip);
-      if (skippedBytes != toSkip) {
-        throw new IOException("The bytes were skipped are "
-            + "different from the caller requested");
-      }
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, toSkip);
     } else {
       if (bytes < fileLength) {
         totalBytesToRead = bytes;
+        skipAfterRead = fileLength - bytes;
       }
     }
@@ -942,7 +942,9 @@ public class AggregatedLogFormat {
         toRead = pendingRead > buf.length ? buf.length : (int) pendingRead;
         len = valueStream.read(buf, 0, toRead);
       }
-      out.println("End of LogType:" + fileType);
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, skipAfterRead);
+      out.println("\nEnd of LogType:" + fileType);
       out.println("");
       return 0;
     } else {
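Taken together, the AggregatedLogFormat hunks above implement a head/tail read over one log-file entry of known length: a negative 'bytes' means "last |bytes| bytes" (skip forward with skipFully, then read), a positive 'bytes' means "first bytes bytes" (read, then drain skipAfterRead so the stream is left at the start of the next entry). The following is a minimal standalone sketch of that bookkeeping, not the patched Hadoop code itself; the class and method names are made up, and it assumes hadoop-common is on the classpath for org.apache.hadoop.io.IOUtils.skipFully.

// Minimal sketch (hypothetical helper, not the patched Hadoop code): copy
// either the first n or the last n bytes of an entry of known length,
// mirroring the toSkip / totalBytesToRead / skipAfterRead bookkeeping above.
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class LimitedLogCopy {
  // bytes > 0: copy the first 'bytes' bytes; bytes < 0: copy the last |bytes|.
  static void copyLimitedBytes(InputStream in, OutputStream out,
      long fileLength, long bytes) throws IOException {
    long toSkip = 0;               // skipped before reading (tail case)
    long totalBytesToRead = fileLength;
    long skipAfterRead = 0;        // drained after reading (head case)
    if (bytes < 0) {
      long absBytes = Math.abs(bytes);
      if (absBytes < fileLength) {
        toSkip = fileLength - absBytes;
        totalBytesToRead = absBytes;
      }
      // skipFully keeps skipping until 'toSkip' bytes are gone, instead of
      // trusting a single InputStream.skip() call that may stop short.
      org.apache.hadoop.io.IOUtils.skipFully(in, toSkip);
    } else if (bytes < fileLength) {
      totalBytesToRead = bytes;
      skipAfterRead = fileLength - bytes;
    }

    byte[] buf = new byte[65536];
    long pending = totalBytesToRead;
    int len;
    while (pending > 0
        && (len = in.read(buf, 0, (int) Math.min(buf.length, pending))) != -1) {
      out.write(buf, 0, len);
      pending -= len;
    }
    // Leave the stream positioned at the end of this entry so the caller can
    // continue with the next log-file record.
    org.apache.hadoop.io.IOUtils.skipFully(in, skipAfterRead);
  }
}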

TestAggregatedLogFormat.java

@@ -258,7 +258,7 @@ public class TestAggregatedLogFormat {
             .currentTimeMillis())).length() : 0)
         + ("\nLogLength:" + numChars).length()
         + "\nLog Contents:\n".length() + numChars + "\n".length()
-        + "End of LogType:stdout\n".length();
+        + "\nEnd of LogType:stdout\n".length();
     Assert.assertTrue("LogType not matched", s.contains("LogType:stdout"));
     Assert.assertTrue("log file:stderr should not be aggregated.", !s.contains("LogType:stderr"));
     Assert.assertTrue("log file:logs should not be aggregated.", !s.contains("LogType:logs"));

AHSWebServices.java

@@ -404,20 +404,19 @@ public class AHSWebServices extends WebServices {
     long toSkip = 0;
     long totalBytesToRead = fileLength;
+    long skipAfterRead = 0;
     if (bytes < 0) {
       long absBytes = Math.abs(bytes);
       if (absBytes < fileLength) {
         toSkip = fileLength - absBytes;
         totalBytesToRead = absBytes;
       }
-      long skippedBytes = valueStream.skip(toSkip);
-      if (skippedBytes != toSkip) {
-        throw new IOException("The bytes were skipped are "
-            + "different from the caller requested");
-      }
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, toSkip);
     } else {
       if (bytes < fileLength) {
         totalBytesToRead = bytes;
+        skipAfterRead = fileLength - bytes;
       }
     }
@@ -435,6 +434,8 @@ public class AHSWebServices extends WebServices {
             : (int) pendingRead;
         len = valueStream.read(buf, 0, toRead);
       }
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, skipAfterRead);
       sb = new StringBuilder();
       sb.append("\nEnd of LogType:" + fileType + "\n");
       b = sb.toString().getBytes(Charset.forName("UTF-8"));

NMWebServices.java

@@ -264,20 +264,18 @@ public class NMWebServices {
     byte[] buf = new byte[bufferSize];
     long toSkip = 0;
     long totalBytesToRead = fileLength;
+    long skipAfterRead = 0;
     if (bytes < 0) {
       long absBytes = Math.abs(bytes);
       if (absBytes < fileLength) {
         toSkip = fileLength - absBytes;
         totalBytesToRead = absBytes;
       }
-      long skippedBytes = fis.skip(toSkip);
-      if (skippedBytes != toSkip) {
-        throw new IOException("The bytes were skipped are different "
-            + "from the caller requested");
-      }
+      org.apache.hadoop.io.IOUtils.skipFully(fis, toSkip);
     } else {
       if (bytes < fileLength) {
         totalBytesToRead = bytes;
+        skipAfterRead = fileLength - bytes;
       }
     }
@@ -295,6 +293,7 @@ public class NMWebServices {
             : (int) pendingRead;
         len = fis.read(buf, 0, toRead);
       }
+      org.apache.hadoop.io.IOUtils.skipFully(fis, skipAfterRead);
       os.flush();
     } finally {
       IOUtils.closeQuietly(fis);
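The one-shot skip() removed here (and in the other files) is the likely source of the reported failure: InputStream.skip(n) is allowed to skip fewer than n bytes, yet the old code threw an IOException whenever the returned count differed from the request. org.apache.hadoop.io.IOUtils.skipFully instead keeps going until the full count is consumed. Below is a rough sketch of such a retry loop, written as a hypothetical helper rather than a copy of the Hadoop implementation:

// Hypothetical helper: keep calling skip() until 'len' bytes are consumed,
// roughly the guarantee org.apache.hadoop.io.IOUtils.skipFully provides.
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

public final class SkipUtil {
  static void skipFully(InputStream in, long len) throws IOException {
    while (len > 0) {
      long skipped = in.skip(len);
      if (skipped > 0) {
        len -= skipped;
      } else if (in.read() == -1) {   // skip() made no progress: probe for EOF
        throw new EOFException("Stream ended before " + len
            + " more bytes could be skipped");
      } else {
        len--;                        // consumed one byte via read()
      }
    }
  }

  private SkipUtil() {
  }
}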

TestLogAggregationService.java

@@ -948,7 +948,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
       Assert.assertEquals(numOfLogsPerContainer, thisContainerMap.size());
       for (String fileType : logFiles) {
         String expectedValue =
-            containerStr + " Hello " + fileType + "!End of LogType:"
+            containerStr + " Hello " + fileType + "!\nEnd of LogType:"
             + fileType;
         LOG.info("Expected log-content : " + new String(expectedValue));
         String foundValue = thisContainerMap.remove(fileType);