HBASE-11 Unexpected exits corrupt DFS
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@648030 13f79535-47bb-0310-9956-ffa450edef68
commit 9a80650d97
parent 73f126dbe9
CHANGES.txt
@@ -3,6 +3,7 @@ Hbase Change Log
    HBASE-574 HBase does not load hadoop native libs (Rong-En Fan via Stack)
    HBASE-573 HBase does not read hadoop-*.xml for dfs configuration after
              moving out hadoop/contrib
+   HBASE-11 Unexpected exits corrupt DFS
 
 Release 0.1.1 - 04/11/2008
 
src/java/org/apache/hadoop/hbase/regionserver/HLog.java
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.FileNotFoundException;
+import java.io.EOFException;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
@@ -296,7 +297,7 @@ public class HLog implements HConstants {
   private void deleteLogFile(final Path p, final Long seqno) throws IOException {
     LOG.info("removing old log file " + p.toString() +
       " whose highest sequence/edit id is " + seqno);
-    this.fs.delete(p);
+    this.fs.delete(p, true);
   }
 
   /**
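The delete calls in this patch move from the single-argument FileSystem.delete(Path), deprecated in Hadoop at the time, to the two-argument form that takes an explicit recursive flag. A minimal sketch of that pattern, assuming a configured FileSystem; the class name and path below are hypothetical and not part of the commit:

// Sketch only: shows the two-argument delete adopted throughout this patch.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path oldLog = new Path("/hbase/log_example/hlog.1234"); // hypothetical path
    // 'true' requests a recursive delete; for a single file the flag is
    // irrelevant, but the two-argument call replaces the deprecated
    // delete(Path) used before this change.
    fs.delete(oldLog, true);
  }
}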
@@ -314,7 +315,7 @@ public class HLog implements HConstants {
    */
   public void closeAndDelete() throws IOException {
     close();
-    fs.delete(dir);
+    fs.delete(dir, true);
   }
 
   /**
@@ -588,7 +589,7 @@ public class HLog implements HConstants {
             w.append(oldkey, oldval);
           }
           old.close();
-          fs.delete(oldlogfile);
+          fs.delete(oldlogfile, true);
         }
       }
       w.append(key, val);
@@ -597,8 +598,11 @@ public class HLog implements HConstants {
           LOG.debug("Applied " + count + " total edits");
         }
       } catch (IOException e) {
+        e = RemoteExceptionHandler.checkIOException(e);
+        if (!(e instanceof EOFException)) {
         LOG.warn("Exception processing " + logfiles[i].getPath() +
           " -- continuing. Possible DATA LOSS!", e);
+        }
       } finally {
         try {
           in.close();
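The new catch block first unwraps any RemoteException via RemoteExceptionHandler.checkIOException and then suppresses the warning for EOFException, since a truncated log tail after an unexpected exit is expected rather than a sign of data loss. A minimal sketch of that control flow; replayEdits and the log name are hypothetical stand-ins for the real per-file split loop:

// Sketch of the tolerant replay pattern added in this hunk.
import java.io.EOFException;
import java.io.IOException;

public class ReplaySketch {
  // Hypothetical stand-in for the per-file edit replay loop in HLog.
  static void replayEdits(String logName) throws IOException {
    // ... read key/value edits until the reader reaches end of file ...
  }

  public static void main(String[] args) {
    String logName = "hlog.1234"; // hypothetical log name
    try {
      replayEdits(logName);
    } catch (IOException e) {
      // In the patch, e is first passed through
      // RemoteExceptionHandler.checkIOException(e) to unwrap RemoteExceptions.
      if (!(e instanceof EOFException)) {
        // Only non-EOF failures warrant a DATA LOSS warning; a truncated tail
        // is the normal result of an unexpected region server exit.
        System.err.println("Exception processing " + logName +
            " -- continuing. Possible DATA LOSS! " + e);
      }
    }
  }
}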
@@ -610,7 +614,7 @@ public class HLog implements HConstants {
           // nothing we can do about it. Replaying it, it could work but we
           // could be stuck replaying for ever. Just continue though we
           // could have lost some edits.
-          fs.delete(logfiles[i].getPath());
+          fs.delete(logfiles[i].getPath(), true);
         }
       }
     } finally {