YARN-4928. Some yarn.server.timeline.* tests fail on Windows attempting to use a test root path containing a colon. Contributed by Gergely Novák.

Author: Junping Du
Date: 2016-04-11 08:48:32 -07:00
parent 1ff27f9d12
commit 08ddb3ac6d
2 changed files with 52 additions and 31 deletions
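
As background on the failure the summary describes: on Windows, java.io.tmpdir resolves to a path like C:\Users\...\Temp, so a test root derived from it carries a drive-letter colon, which is not a legal character in an HDFS path name; the patch therefore resolves test directories inside the MiniDFSCluster namespace through FileContextTestHelper. The sketch below contrasts the two approaches using only calls that appear in the diff; the class and method names of the sketch itself (TestRootPathSketch, localTmpBasedRoot, dfsBasedDir) are hypothetical, and it assumes a running hdfsCluster and config such as these tests set up.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;

// Hypothetical illustration; none of the names below are part of the patch.
class TestRootPathSketch {

  // Old approach: build the test root from java.io.tmpdir. On Windows this is
  // something like "C:\Users\me\AppData\Local\Temp\...", and the drive-letter
  // colon is rejected once that string is used as an HDFS directory name.
  static Path localTmpBasedRoot() {
    return new Path(System.getProperty("test.build.data",
        System.getProperty("java.io.tmpdir")), "TestEntityGroupFSTimelineStore");
  }

  // Patched approach: resolve test directories inside the MiniDFSCluster
  // namespace via FileContextTestHelper, giving colon-free paths such as
  // "/tmp/TestEntityGroupFSTimelineStore/done".
  static Path dfsBasedDir(MiniDFSCluster hdfsCluster, Configuration config,
      String child) throws IOException {
    FileContext fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
    FileContextTestHelper helper =
        new FileContextTestHelper("/tmp/TestEntityGroupFSTimelineStore");
    return helper.getTestRootPath(fc, child);  // e.g. child = "done"
  }
}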

TestEntityGroupFSTimelineStore.java

@@ -21,6 +21,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextTestHelper;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -75,16 +76,16 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
       = new Path(System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir")),
           TestEntityGroupFSTimelineStore.class.getSimpleName());
-  private static final Path TEST_APP_DIR_PATH
-      = new Path(TEST_ROOT_DIR, TEST_APP_DIR_NAME);
-  private static final Path TEST_ATTEMPT_DIR_PATH
-      = new Path(TEST_APP_DIR_PATH, TEST_ATTEMPT_DIR_NAME);
-  private static final Path TEST_DONE_DIR_PATH
-      = new Path(TEST_ROOT_DIR, "done");
+  private static Path testAppDirPath;
+  private static Path testAttemptDirPath;
+  private static Path testDoneDirPath;
 
   private static Configuration config = new YarnConfiguration();
   private static MiniDFSCluster hdfsCluster;
   private static FileSystem fs;
+  private static FileContext fc;
+  private static FileContextTestHelper fileContextTestHelper =
+      new FileContextTestHelper("/tmp/TestEntityGroupFSTimelineStore");
 
   private EntityGroupFSTimelineStore store;
   private TimelineEntity entityNew;
@@ -98,13 +99,17 @@ public static void setupClass() throws Exception {
         YarnConfiguration
             .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES,
         "YARN_APPLICATION,YARN_APPLICATION_ATTEMPT,YARN_CONTAINER");
-    config.set(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_DONE_DIR,
-        TEST_DONE_DIR_PATH.toString());
     config.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR.toString());
     HdfsConfiguration hdfsConfig = new HdfsConfiguration();
     hdfsCluster
         = new MiniDFSCluster.Builder(hdfsConfig).numDataNodes(1).build();
     fs = hdfsCluster.getFileSystem();
+    fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
+    testAppDirPath = getTestRootPath(TEST_APPLICATION_ID.toString());
+    testAttemptDirPath = new Path(testAppDirPath, TEST_ATTEMPT_DIR_NAME);
+    testDoneDirPath = getTestRootPath("done");
+    config.set(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_DONE_DIR, testDoneDirPath.toString());
   }
 
   @Before
@@ -123,7 +128,7 @@ public void setup() throws Exception {
   @After
   public void tearDown() throws Exception {
     store.stop();
-    fs.delete(TEST_APP_DIR_PATH, true);
+    fs.delete(testAppDirPath, true);
   }
 
   @AfterClass
@@ -137,7 +142,7 @@ public static void tearDownClass() throws Exception {
   @Test
   public void testAppLogsScanLogs() throws Exception {
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
            AppState.COMPLETED);
     appLogs.scanForLogs();
     List<LogInfo> summaryLogs = appLogs.getSummaryLogs();
@@ -160,20 +165,20 @@ store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
   @Test
   public void testMoveToDone() throws Exception {
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
            AppState.COMPLETED);
     Path pathBefore = appLogs.getAppDirPath();
     appLogs.moveToDone();
     Path pathAfter = appLogs.getAppDirPath();
     assertNotEquals(pathBefore, pathAfter);
-    assertTrue(pathAfter.toString().contains(TEST_DONE_DIR_PATH.toString()));
+    assertTrue(pathAfter.toString().contains(testDoneDirPath.toString()));
   }
 
   @Test
   public void testParseSummaryLogs() throws Exception {
     TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithMemStore(config);
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
            AppState.COMPLETED);
     appLogs.scanForLogs();
     appLogs.parseSummaryLogs(tdm);
@@ -185,14 +190,14 @@ public void testCleanLogs() throws Exception {
     // Create test dirs and files
     // Irrelevant file, should not be reclaimed
     Path irrelevantFilePath = new Path(
-        TEST_DONE_DIR_PATH, "irrelevant.log");
+        testDoneDirPath, "irrelevant.log");
     FSDataOutputStream stream = fs.create(irrelevantFilePath);
     stream.close();
 
     // Irrelevant directory, should not be reclaimed
-    Path irrelevantDirPath = new Path(TEST_DONE_DIR_PATH, "irrelevant");
+    Path irrelevantDirPath = new Path(testDoneDirPath, "irrelevant");
     fs.mkdirs(irrelevantDirPath);
-    Path doneAppHomeDir = new Path(new Path(TEST_DONE_DIR_PATH, "0000"), "001");
+    Path doneAppHomeDir = new Path(new Path(testDoneDirPath, "0000"), "001");
     // First application, untouched after creation
     Path appDirClean = new Path(doneAppHomeDir, TEST_APP_DIR_NAME);
     Path attemptDirClean = new Path(appDirClean, TEST_ATTEMPT_DIR_NAME);
@@ -222,7 +227,7 @@ public void testCleanLogs() throws Exception {
     fs.mkdirs(dirPathEmpty);
 
     // Should retain all logs after this run
-    store.cleanLogs(TEST_DONE_DIR_PATH, fs, 10000);
+    store.cleanLogs(testDoneDirPath, fs, 10000);
     assertTrue(fs.exists(irrelevantDirPath));
     assertTrue(fs.exists(irrelevantFilePath));
     assertTrue(fs.exists(filePath));
@@ -239,7 +244,7 @@ public void testCleanLogs() throws Exception {
     // Touch the third application by creating a new dir
     fs.mkdirs(new Path(dirPathHold, "holdByMe"));
 
-    store.cleanLogs(TEST_DONE_DIR_PATH, fs, 1000);
+    store.cleanLogs(testDoneDirPath, fs, 1000);
 
     // Verification after the second cleaner call
     assertTrue(fs.exists(irrelevantDirPath));
@@ -261,7 +266,7 @@ public void testPluginRead() throws Exception {
         YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
     // Load data and cache item, prepare timeline store by making a cache item
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
            AppState.COMPLETED);
     EntityCacheItem cacheItem = new EntityCacheItem(config, fs);
     cacheItem.setAppLogs(appLogs);
@@ -291,7 +296,7 @@ store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
   public void testSummaryRead() throws Exception {
     // Load data
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
            AppState.COMPLETED);
     TimelineDataManager tdm
         = PluginStoreTestUtils.getTdmWithStore(config, store);
@@ -314,7 +319,7 @@ store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
   private void createTestFiles() throws IOException {
     TimelineEntities entities = PluginStoreTestUtils.generateTestEntities();
     PluginStoreTestUtils.writeEntities(entities,
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_SUMMARY_LOG_FILE_NAME), fs);
+        new Path(testAttemptDirPath, TEST_SUMMARY_LOG_FILE_NAME), fs);
 
     entityNew = PluginStoreTestUtils
         .createEntity("id_3", "type_3", 789l, null, null,
@@ -322,11 +327,15 @@ private void createTestFiles() throws IOException {
     TimelineEntities entityList = new TimelineEntities();
     entityList.addEntity(entityNew);
     PluginStoreTestUtils.writeEntities(entityList,
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_ENTITY_LOG_FILE_NAME), fs);
+        new Path(testAttemptDirPath, TEST_ENTITY_LOG_FILE_NAME), fs);
 
     FSDataOutputStream out = fs.create(
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_DOMAIN_LOG_FILE_NAME));
+        new Path(testAttemptDirPath, TEST_DOMAIN_LOG_FILE_NAME));
     out.close();
   }
 
+  private static Path getTestRootPath(String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
+
 }

TestLogInfo.java

@@ -18,6 +18,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextTestHelper;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -60,6 +62,8 @@ public class TestLogInfo {
   private Configuration config = new YarnConfiguration();
   private MiniDFSCluster hdfsCluster;
   private FileSystem fs;
+  private FileContext fc;
+  private FileContextTestHelper fileContextTestHelper = new FileContextTestHelper("/tmp/TestLogInfo");
   private ObjectMapper objMapper;
   private JsonFactory jsonFactory = new JsonFactory();
@@ -77,7 +81,8 @@ public void setup() throws Exception {
     HdfsConfiguration hdfsConfig = new HdfsConfiguration();
     hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig).numDataNodes(1).build();
     fs = hdfsCluster.getFileSystem();
-    Path testAppDirPath = new Path(TEST_ROOT_DIR, TEST_ATTEMPT_DIR_NAME);
+    fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
+    Path testAppDirPath = getTestRootPath(TEST_ATTEMPT_DIR_NAME);
     fs.mkdirs(testAppDirPath, new FsPermission(FILE_LOG_DIR_PERMISSIONS));
 
     objMapper = PluginStoreTestUtils.createObjectMapper();
@@ -146,7 +151,7 @@ public void testParseEntity() throws Exception {
     EntityLogInfo testLogInfo = new EntityLogInfo(TEST_ATTEMPT_DIR_NAME,
         TEST_ENTITY_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify for the first batch
     PluginStoreTestUtils.verifyTestEntities(tdm);
@@ -157,9 +162,8 @@ public void testParseEntity() throws Exception {
     TimelineEntities entityList = new TimelineEntities();
     entityList.addEntity(entityNew);
     writeEntitiesLeaveOpen(entityList,
-        new Path(new Path(TEST_ROOT_DIR, TEST_ATTEMPT_DIR_NAME),
-            TEST_ENTITY_FILE_NAME));
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+        new Path(getTestRootPath(TEST_ATTEMPT_DIR_NAME), TEST_ENTITY_FILE_NAME));
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify the newly added data
     TimelineEntity entity3 = tdm.getEntity(entityNew.getEntityType(),
@@ -182,9 +186,9 @@ public void testParseBrokenEntity() throws Exception {
         TEST_BROKEN_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
     // Try parse, should not fail
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
-    domainLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    domainLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     tdm.close();
   }
@@ -196,7 +200,7 @@ public void testParseDomain() throws Exception {
     DomainLogInfo domainLogInfo = new DomainLogInfo(TEST_ATTEMPT_DIR_NAME,
         TEST_DOMAIN_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
-    domainLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    domainLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify domain data
     TimelineDomain resultDomain = tdm.getDomain("domain_1",
@@ -250,4 +254,12 @@ private void writeDomainLeaveOpen(TimelineDomain domain, Path logPath)
     outStreamDomain.hflush();
   }
 
+  private Path getTestRootPath() {
+    return fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
+
 }