HBASE-2529 Make OldLogsCleaner easier to extend
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@945963 13f79535-47bb-0310-9956-ffa450edef68
commit 561ae3b226
parent 96321df582
CHANGES.txt
@@ -641,6 +641,7 @@ Release 0.21.0 - Unreleased
    HBASE-2438  Addition of a Column Pagination Filter (Paul Kist via Stack)
    HBASE-2473  Add to admin create table start and end key params and
                desired number of regions
+   HBASE-2529  Make OldLogsCleaner easier to extend
 
  OPTIMIZATIONS
    HBASE-410   [testing] Speed up the test suite
LogCleanerDelegate.java (new file)
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * Interface for the log cleaning function inside the master. Only one
+ * cleaner is invoked, so if the desired effect is a mix of many cleaners,
+ * call them yourself in order to control the flow.
+ * HBase ships with TimeToLiveLogCleaner as the default implementation.
+ */
+public interface LogCleanerDelegate extends Configurable {
+
+  /**
+   * Should the master delete the log or keep it?
+   * @param filePath full path to log
+   * @return true if the log is deletable, false if not
+   */
+  public boolean isLogDeletable(Path filePath);
+}
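For anyone extending this hook, the following is a minimal sketch of a custom delegate. The class name PatternKeepLogCleaner and the hbase.master.logcleaner.keep.regex key are invented for illustration and are not part of this commit; the sketch keeps any archived log whose name matches a configurable regex and lets everything else be deleted.

package org.apache.hadoop.hbase.master;

import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

/** Hypothetical delegate: never deletes logs whose names match a configured regex. */
public class PatternKeepLogCleaner implements LogCleanerDelegate {

  private Configuration conf;
  private Pattern keep;

  @Override
  public boolean isLogDeletable(Path filePath) {
    // With no pattern configured, behave as a no-op and allow deletion.
    if (keep == null) {
      return true;
    }
    // Keep (i.e. refuse to delete) anything the pattern matches.
    return !keep.matcher(filePath.getName()).matches();
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
    String regex = conf.get("hbase.master.logcleaner.keep.regex");
    this.keep = (regex == null) ? null : Pattern.compile(regex);
  }

  @Override
  public Configuration getConf() {
    return conf;
  }
}

Since the master instantiates exactly one delegate, a composite of several policies would itself have to be written as a single LogCleanerDelegate that calls the others, as the interface javadoc notes.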
OldLogsCleaner.java
@@ -41,59 +41,59 @@ public class OldLogsCleaner extends Chore {
   static final Log LOG = LogFactory.getLog(OldLogsCleaner.class.getName());
 
-  // Configured time a log can be kept after it was closed
-  private final long ttl;
   // Max number we can delete on every chore, this is to make sure we don't
   // issue thousands of delete commands around the same time
   private final int maxDeletedLogs;
   private final FileSystem fs;
   private final Path oldLogDir;
-  // We expect a file looking like ts.hlog.dat.ts
+  private final LogCleanerDelegate logCleaner;
+  private final Configuration conf;
+  // We expect a file looking like hlog.dat.ts
   private final Pattern pattern = Pattern.compile("\\d*\\.hlog\\.dat\\.\\d*");
 
   /**
    *
-   * @param p
-   * @param s
-   * @param conf
-   * @param fs
-   * @param oldLogDir
+   * @param p the period of time to sleep between each run
+   * @param s the stopper boolean
+   * @param conf configuration to use
+   * @param fs handle to the FS
+   * @param oldLogDir the path to the archived logs
    */
   public OldLogsCleaner(final int p, final AtomicBoolean s,
                         Configuration conf, FileSystem fs,
                         Path oldLogDir) {
     super(p, s);
-    this.ttl = conf.getLong("hbase.master.logcleaner.ttl", 600000);
     this.maxDeletedLogs =
         conf.getInt("hbase.master.logcleaner.maxdeletedlogs", 20);
     this.fs = fs;
     this.oldLogDir = oldLogDir;
+    this.conf = conf;
+    this.logCleaner = getLogCleaner();
+  }
+
+  private LogCleanerDelegate getLogCleaner() {
+    try {
+      Class c = Class.forName(conf.get("hbase.master.logcleanerplugin.impl",
+          TimeToLiveLogCleaner.class.getCanonicalName()));
+      LogCleanerDelegate cleaner = (LogCleanerDelegate) c.newInstance();
+      cleaner.setConf(conf);
+      return cleaner;
+    } catch (Exception e) {
+      LOG.warn("Passed log cleaner implementation throws errors, " +
+          "defaulting to TimeToLiveLogCleaner", e);
+      return new TimeToLiveLogCleaner();
+    }
   }
 
   @Override
   protected void chore() {
     try {
       FileStatus[] files = this.fs.listStatus(this.oldLogDir);
-      long currentTime = System.currentTimeMillis();
       int nbDeletedLog = 0;
       for (FileStatus file : files) {
         Path filePath = file.getPath();
 
         if (pattern.matcher(filePath.getName()).matches()) {
-          String[] parts = filePath.getName().split("\\.");
-          long time = 0;
-          try {
-            time = Long.parseLong(parts[3]);
-          } catch (NumberFormatException e) {
-            // won't happen
-          }
-          long life = currentTime - time;
-          if (life < 0) {
-            LOG.warn("Found a log newer than current time, " +
-                "probably a clock skew");
-            continue;
-          }
-          if (life > ttl) {
+          if (logCleaner.isLogDeletable(filePath)) {
             this.fs.delete(filePath, true);
             nbDeletedLog++;
           }
@@ -106,7 +106,6 @@ public class OldLogsCleaner extends Chore {
         if (nbDeletedLog >= maxDeletedLogs) {
           break;
         }
-
       }
     } catch (IOException e) {
       e = RemoteExceptionHandler.checkIOException(e);
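The new getLogCleaner() above loads whichever class the hbase.master.logcleanerplugin.impl key names, via Class.forName(...).newInstance(), and falls back to TimeToLiveLogCleaner if that fails. Below is a rough wiring sketch, not part of this commit: the helper class, the 60 * 1000 period, and the PatternKeepLogCleaner reference are hypothetical, and in a real deployment the key would normally be set in hbase-site.xml instead of in code.

package org.apache.hadoop.hbase.master;

import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Hypothetical wiring sketch, not part of this commit. */
public class OldLogsCleanerWiringSketch {

  public static OldLogsCleaner buildCleaner(Path oldLogDir) throws Exception {
    Configuration conf = new Configuration();
    // Any class implementing LogCleanerDelegate with a no-arg constructor works here.
    conf.set("hbase.master.logcleanerplugin.impl",
        "org.apache.hadoop.hbase.master.PatternKeepLogCleaner");
    FileSystem fs = FileSystem.get(conf);
    // The constructor calls getLogCleaner(), which instantiates the configured
    // class reflectively, calls setConf(conf) on it, and falls back to
    // TimeToLiveLogCleaner if anything goes wrong.
    return new OldLogsCleaner(60 * 1000, new AtomicBoolean(false), conf, fs, oldLogDir);
  }
}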
TimeToLiveLogCleaner.java (new file)
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Log cleaner that uses the timestamp of the hlog to determine if it should
+ * be deleted. By default they are allowed to live for 10 minutes.
+ */
+public class TimeToLiveLogCleaner implements LogCleanerDelegate {
+
+  static final Log LOG =
+      LogFactory.getLog(TimeToLiveLogCleaner.class.getName());
+  private Configuration conf;
+  // Configured time a log can be kept after it was closed
+  private long ttl;
+
+  @Override
+  public boolean isLogDeletable(Path filePath) {
+    long time = 0;
+    long currentTime = System.currentTimeMillis();
+    System.out.println(filePath.getName());
+    String[] parts = filePath.getName().split("\\.");
+    try {
+      time = Long.parseLong(parts[3]);
+    } catch (NumberFormatException e) {
+      e.printStackTrace();
+    }
+    long life = currentTime - time;
+    if (life < 0) {
+      LOG.warn("Found a log newer than current time, " +
+          "probably a clock skew");
+      return false;
+    }
+    return life > ttl;
+  }
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+    this.ttl = conf.getLong("hbase.master.logcleaner.ttl", 600000);
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+}
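TimeToLiveLogCleaner splits the file name on dots and reads the fourth token as the timestamp at which the log was closed; a log becomes deletable once the elapsed time exceeds hbase.master.logcleaner.ttl (600000 ms, i.e. ten minutes, by default). A small standalone sketch of that behaviour follows; the class name, directory, and timestamps are made up for illustration and are not part of this commit.

package org.apache.hadoop.hbase.master;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

/** Hypothetical sketch exercising TimeToLiveLogCleaner against ts.hlog.dat.ts style names. */
public class TimeToLiveLogCleanerSketch {

  public static void main(String[] args) {
    TimeToLiveLogCleaner cleaner = new TimeToLiveLogCleaner();
    Configuration conf = new Configuration();
    conf.setLong("hbase.master.logcleaner.ttl", 600000);  // 10 minutes
    cleaner.setConf(conf);

    long now = System.currentTimeMillis();
    // Closed 20 minutes ago: past the ttl, so it should be deletable.
    Path old = new Path("/hbase/.oldlogs", "1.hlog.dat." + (now - 20 * 60 * 1000));
    // Closed just now: still within the ttl, so it should be kept.
    Path fresh = new Path("/hbase/.oldlogs", "1.hlog.dat." + now);

    System.out.println("old deletable?   " + cleaner.isLogDeletable(old));   // true
    System.out.println("fresh deletable? " + cleaner.isLogDeletable(fresh)); // false
  }
}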
TestOldLogsCleaner.java
@@ -86,9 +86,10 @@ public class TestOldLogsCleaner {
     fs.createNewFile(new Path(oldLogDir, "1.hlog.dat.a"));
     fs.createNewFile(new Path(oldLogDir, "1.hlog.dat." + now));
     for(int i = 0; i < 30; i++) {
-      fs.createNewFile(new Path(oldLogDir, i + ".hlog.dat." +(now - 6000000)));
+      fs.createNewFile(new Path(oldLogDir, 1 + "hlog.dat." +
+          (now - 6000000 - i)));
     }
-    fs.createNewFile(new Path(oldLogDir, "a.hlog.dat." +(now + 10000)));
+    fs.createNewFile(new Path(oldLogDir, "a.hlog.dat." + (now + 10000)));
 
     assertEquals(34, fs.listStatus(oldLogDir).length);
 