svn merge -c 1390616 to fix HADOOP-8843. Old trash directories are never deleted on upgrade from 1.x. Contributed by Jason Lowe

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1390621 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jason Darrell Lowe 2012-09-26 17:23:34 +00:00
parent e5e1887092
commit f4271de5da
3 changed files with 57 additions and 3 deletions

View File

@ -747,6 +747,9 @@ Release 0.23.4 - UNRELEASED
BUG FIXES
HADOOP-8843. Old trash directories are never deleted on upgrade
from 1.x (jlowe)
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES

View File

@ -61,6 +61,9 @@ public class TrashPolicyDefault extends TrashPolicy {
new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
private static final DateFormat CHECKPOINT = new SimpleDateFormat("yyMMddHHmmss");
/** Format of checkpoint directories used prior to Hadoop 0.23. */
private static final DateFormat OLD_CHECKPOINT =
new SimpleDateFormat("yyMMddHHmm");
private static final int MSECS_PER_MINUTE = 60*1000;
private Path current;
@ -202,9 +205,7 @@ public class TrashPolicyDefault extends TrashPolicy {
long time;
try {
-  synchronized (CHECKPOINT) {
-    time = CHECKPOINT.parse(name).getTime();
-  }
+  time = getTimeFromCheckpoint(name);
} catch (ParseException e) {
  LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
  continue;
@ -304,4 +305,22 @@ public class TrashPolicyDefault extends TrashPolicy {
return (time / interval) * interval;
}
}
/**
 * Converts a checkpoint directory name into the time it encodes.
 * Tries the current checkpoint format first and, if that fails, falls
 * back to the pre-0.23 format so trash checkpoints left behind by an
 * upgrade from Hadoop 1.x are still recognized (and thus expunged).
 *
 * @param name the checkpoint directory name to parse
 * @return the checkpoint time in milliseconds since the epoch
 * @throws ParseException if the name matches neither checkpoint format
 */
private long getTimeFromCheckpoint(String name) throws ParseException {
  try {
    // SimpleDateFormat is not thread-safe; serialize access on the formatter.
    synchronized (CHECKPOINT) {
      return CHECKPOINT.parse(name).getTime();
    }
  } catch (ParseException e) {
    // Not in the current format: try the old-style (pre-Hadoop 0.23)
    // checkpoint name left over after an upgrade from 1.x.
    synchronized (OLD_CHECKPOINT) {
      return OLD_CHECKPOINT.parse(name).getTime();
    }
  }
}
}

View File

@ -26,6 +26,8 @@ import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.HashSet;
import java.util.Set;
@ -434,6 +436,36 @@ public class TestTrash extends TestCase {
output.indexOf("Failed to determine server trash configuration") != -1);
}
// Verify old checkpoint format is recognized
{
// emulate two old trash checkpoint directories, one that is old enough
// to be deleted on the next expunge and one that isn't.
long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
FS_TRASH_INTERVAL_DEFAULT);
long now = Time.now();
DateFormat oldCheckpointFormat = new SimpleDateFormat("yyMMddHHmm");
Path dirToDelete = new Path(trashRoot.getParent(),
oldCheckpointFormat.format(now - (trashInterval * 60 * 1000) - 1));
Path dirToKeep = new Path(trashRoot.getParent(),
oldCheckpointFormat.format(now));
mkdir(trashRootFs, dirToDelete);
mkdir(trashRootFs, dirToKeep);
// Clear out trash
int rc = -1;
try {
rc = shell.run(new String [] { "-expunge" } );
} catch (Exception e) {
System.err.println("Exception raised from fs expunge " +
e.getLocalizedMessage());
}
assertEquals(0, rc);
assertFalse("old checkpoint format not recognized",
trashRootFs.exists(dirToDelete));
assertTrue("old checkpoint format directory should not be removed",
trashRootFs.exists(dirToKeep));
}
}
public static void trashNonDefaultFS(Configuration conf) throws IOException {