MAPREDUCE-4416. Some tests fail if Clover is enabled (Kihwal Lee via bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1360735 13f79535-47bb-0310-9956-ffa450edef68
parent b5b0ac64a1
commit 26910fd058
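The change has two halves: TestIndexCache now builds its JobConf, raw local FileSystem, and qualified Path once in setUp() instead of repeating the boilerplate in every test, and gains a testCreateRace() case for concurrent cache-entry creation; on the build side, an opt-in clover profile is added to the affected poms and the clover artifact's version is pinned in dependencyManagement, presumably so Clover-instrumented test classes can find the com.cenqua.clover runtime when the tests run.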
CHANGES.txt:
@@ -221,6 +221,8 @@ Branch-2 ( Unreleased changes )
     MAPREDUCE-3993. Graceful handling of codec errors during decompression
     (kkambatl via tucu)
 
+    MAPREDUCE-4416. Some tests fail if Clover is enabled (Kihwal Lee via bobby)
+
 Release 2.0.0-alpha - 05-23-2012
 
   INCOMPATIBLE CHANGES
TestIndexCache.java:
@@ -35,16 +35,23 @@ import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import junit.framework.TestCase;
 
 public class TestIndexCache extends TestCase {
+  private JobConf conf;
+  private FileSystem fs;
+  private Path p;
+
+  @Override
+  public void setUp() throws IOException {
+    conf = new JobConf();
+    fs = FileSystem.getLocal(conf).getRaw();
+    p = new Path(System.getProperty("test.build.data", "/tmp"),
+        "cache").makeQualified(fs.getUri(), fs.getWorkingDirectory());
+  }
 
   public void testLRCPolicy() throws Exception {
     Random r = new Random();
     long seed = r.nextLong();
     r.setSeed(seed);
     System.out.println("seed: " + seed);
-    JobConf conf = new JobConf();
-    FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "cache").makeQualified(fs);
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
     final int partsPerMap = 1000;
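The setUp() above also swaps the one-argument Path.makeQualified(FileSystem), deprecated in this era of the API, for the makeQualified(URI, Path) overload, and the three hunks that follow delete the now-redundant per-test locals in favor of the shared fields. A minimal standalone sketch of the same qualification step, assuming nothing beyond a stock Hadoop Configuration (class name hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class QualifyPathSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // getRaw() bypasses the checksumming wrapper around the local filesystem
        FileSystem fs = FileSystem.getLocal(conf).getRaw();
        // Resolve the path against the filesystem's scheme and working
        // directory, e.g. "cache" becomes file:/tmp/cache
        Path p = new Path(System.getProperty("test.build.data", "/tmp"),
            "cache").makeQualified(fs.getUri(), fs.getWorkingDirectory());
        System.out.println(p);
      }
    }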
@@ -115,10 +122,6 @@ public class TestIndexCache extends TestCase {
 
   public void testBadIndex() throws Exception {
     final int parts = 30;
-    JobConf conf = new JobConf();
-    FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "cache").makeQualified(fs);
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
     IndexCache cache = new IndexCache(conf);
@@ -150,10 +153,6 @@ public class TestIndexCache extends TestCase {
   }
 
   public void testInvalidReduceNumberOrLength() throws Exception {
-    JobConf conf = new JobConf();
-    FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "cache").makeQualified(fs);
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
     final int partsPerMap = 1000;
@@ -199,10 +198,6 @@ public class TestIndexCache extends TestCase {
     // This test case may not repeatable. But on my macbook this test
     // fails with probability of 100% on code before MAPREDUCE-2541,
     // so it is repeatable in practice.
-    JobConf conf = new JobConf();
-    FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "cache").makeQualified(fs);
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 10);
     // Make a big file so removeMapThread almost surely runs faster than
@@ -247,6 +242,66 @@ public class TestIndexCache extends TestCase {
     }
   }
 
+  public void testCreateRace() throws Exception {
+    fs.delete(p, true);
+    conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
+    final int partsPerMap = 1000;
+    final int bytesPerFile = partsPerMap * 24;
+    final IndexCache cache = new IndexCache(conf);
+
+    final Path racy = new Path(p, "racyIndex");
+    final String user =
+        UserGroupInformation.getCurrentUser().getShortUserName();
+    writeFile(fs, racy, bytesPerFile, partsPerMap);
+
+    // run multiple instances
+    Thread[] getInfoThreads = new Thread[50];
+    for (int i = 0; i < 50; i++) {
+      getInfoThreads[i] = new Thread() {
+        @Override
+        public void run() {
+          try {
+            cache.getIndexInformation("racyIndex", partsPerMap, racy, user);
+            cache.removeMap("racyIndex");
+          } catch (Exception e) {
+            // should not be here
+          }
+        }
+      };
+    }
+
+    for (int i = 0; i < 50; i++) {
+      getInfoThreads[i].start();
+    }
+
+    final Thread mainTestThread = Thread.currentThread();
+
+    Thread timeoutThread = new Thread() {
+      @Override
+      public void run() {
+        try {
+          Thread.sleep(15000);
+          mainTestThread.interrupt();
+        } catch (InterruptedException ie) {
+          // we are done;
+        }
+      }
+    };
+
+    for (int i = 0; i < 50; i++) {
+      try {
+        getInfoThreads[i].join();
+      } catch (InterruptedException ie) {
+        // we haven't finished in time. Potential deadlock/race.
+        fail("Unexpectedly long delay during concurrent cache entry creations");
+      }
+    }
+    // stop the timeoutThread. If we get interrupted before stopping, there
+    // must be something wrong, although it wasn't a deadlock. No need to
+    // catch and swallow.
+    timeoutThread.interrupt();
+  }
+
   private static void checkRecord(IndexRecord rec, long fill) {
     assertEquals(fill, rec.startOffset);
     assertEquals(fill, rec.rawLength);
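testCreateRace() hammers a single cache entry from 50 threads, each calling getIndexInformation() and then removeMap(), while the main thread joins them; a watchdog thread is built to interrupt the main thread after 15 seconds so a deadlock fails the test instead of hanging the build. As written, though, nothing in this hunk calls timeoutThread.start(), so the guard is inert unless started elsewhere. A condensed, runnable sketch of the watchdog idiom (class name and the sleep standing in for real work are hypothetical):

    public class WatchdogSketch {
      public static void main(String[] args) {
        final Thread caller = Thread.currentThread();
        Thread watchdog = new Thread() {
          @Override
          public void run() {
            try {
              Thread.sleep(15000);   // the deadline
              caller.interrupt();    // wake the caller out of join()/sleep()
            } catch (InterruptedException ie) {
              // the caller finished first and cancelled the watchdog
            }
          }
        };
        watchdog.start();            // the step the hunk above appears to omit
        try {
          Thread.sleep(30000);       // stand-in for joining the worker threads
          System.out.println("finished before the deadline");
        } catch (InterruptedException ie) {
          System.out.println("deadline hit: fail fast instead of hanging");
        }
        watchdog.interrupt();        // cancel the watchdog on success
      }
    }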
pom.xml:
@@ -99,6 +99,23 @@
     </dependency>
   </dependencies>
 
+  <profiles>
+    <profile>
+      <id>clover</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+        <property>
+          <name>clover</name>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.cenqua.clover</groupId>
+          <artifactId>clover</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
   <build>
     <plugins>
       <plugin>
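With <activeByDefault>false</activeByDefault> plus a property trigger named clover, this profile stays out of ordinary builds and switches on when the property is defined, i.e. mvn test -Pclover (explicit profile id) or mvn test -Dclover=true (property activation); either way the clover artifact joins the module's classpath so Clover-instrumented classes can resolve their runtime. The same profile is repeated in the next pom below.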
pom.xml:
@@ -31,4 +31,21 @@
   <modules>
     <module>hadoop-yarn-applications-distributedshell</module>
   </modules>
+  <profiles>
+    <profile>
+      <id>clover</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+        <property>
+          <name>clover</name>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>com.cenqua.clover</groupId>
+          <artifactId>clover</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
pom.xml:
@@ -648,7 +648,12 @@
       <version>4.0.0</version>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>com.cenqua.clover</groupId>
+      <artifactId>clover</artifactId>
+      <!-- Use the version needed by maven-clover-plugin -->
+      <version>3.0.2</version>
+    </dependency>
   </dependencies>
 </dependencyManagement>
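Declaring com.cenqua.clover:clover once here with an explicit 3.0.2 is what lets the per-module profiles above omit a <version> element: dependencyManagement in the parent pins the version, and the comment ties the choice to the version maven-clover-plugin instruments with, keeping the runtime on the test classpath in step with the instrumentation.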