HBASE-996 Migration script to up the versions in catalog tables
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@713502 13f79535-47bb-0310-9956-ffa450edef68
parent 01f09c1036
commit 67fc6fdb02
CHANGES.txt
@@ -71,6 +71,7 @@ Release 0.19.0 - Unreleased
              for root region"
   HBASE-990   NoSuchElementException in flushSomeRegions
   HBASE-602   HBase Crash when network card has a IPv6 address
+  HBASE-996   Migration script to up the versions in catalog tables
 
   IMPROVEMENTS
   HBASE-901   Add a limit to key length, check key and value length on client side
conf/hbase-env.sh
@@ -23,7 +23,7 @@
 
 # The java implementation to use. Required.
 # export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-export JAVA_HOME=/usr
+export JAVA_HOME=/Users/stack/bin/jdk
 
 # Extra Java CLASSPATH elements. Optional.
 # export HBASE_CLASSPATH=
HConstants.java (org.apache.hadoop.hbase)
@@ -44,9 +44,10 @@ public interface HConstants {
 
   /**
    * Current version of file system
-   * Version 4 supports only one kind of bloom filter
+   * Version 4 supports only one kind of bloom filter.
+   * Version 5 changes versions in catalog table regions.
    */
-  public static final String FILE_SYSTEM_VERSION = "4";
+  public static final String FILE_SYSTEM_VERSION = "5";
 
   // Configuration parameters
 
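How the bumped constant is consumed: the Migrate tool below reads the version string stored in the hbase.version file under hbase.rootdir and compares it against HConstants.FILE_SYSTEM_VERSION before deciding whether it can migrate in place. The following is only a minimal sketch of that gate, not code from this commit; the class and method names are illustrative.

import java.io.IOException;

// Illustrative sketch: a layout exactly one filesystem version behind can be migrated
// in place; anything older must first pass through an intermediate hbase release.
final class VersionGateSketch {
  static void checkMigratable(final String onDiskVersion, final String softwareVersion)
  throws IOException {
    final int onDisk = Integer.parseInt(onDiskVersion);     // e.g. "4" read from hbase.version
    final int current = Integer.parseInt(softwareVersion);  // HConstants.FILE_SYSTEM_VERSION, now "5"
    if (onDisk == current) {
      return;                    // filesystem already matches the software; nothing to migrate
    }
    if (onDisk < current - 1) {  // more than one version divide to cross
      throw new IOException("Install the intermediate hbase release and migrate in steps");
    }
    // exactly one version behind: the in-place step (here, migrateTo5 below) may run
  }
}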
Migrate.java (org.apache.hadoop.hbase.util)
@@ -21,40 +21,35 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.commons.cli.Options;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
- * Perform a file system upgrade to convert older file layouts.
+ * Perform a migration.
  * HBase keeps a file in hdfs named hbase.version just under the hbase.rootdir.
  * This file holds the version of the hbase data in the Filesystem. When the
  * software changes in a manner incompatible with the data in the Filesystem,
  * it updates its internal version number,
  * {@link HConstants#FILE_SYSTEM_VERSION}. This wrapper script manages moving
  * the filesystem across versions until there's a match with current software's
- * version number.
+ * version number. This script will only cross a particular version divide. You may
+ * need to install earlier or later hbase to migrate earlier (or older) versions.
  *
  * <p>This wrapper script comprises a set of migration steps. Which steps
  * are run depends on the span between the version of the hbase data in the
@@ -72,13 +67,12 @@ import org.apache.hadoop.util.ToolRunner;
  * edits made by previous migration steps not being apparent in later migration
  * steps). The upshot is always verify presumptions migrating.
  *
- * <p>This script will migrate an hbase 0.1 install to a 0.2 install only.
+ * <p>This script will migrate an hbase 0.18.x only.
  *
  * @see <a href="http://wiki.apache.org/hadoop/Hbase/HowToMigrate">How To Migration</a>
  */
 public class Migrate extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(Migrate.class);
 
   private final HBaseConfiguration conf;
   private FileSystem fs;
 
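The javadoc above frames the tool as a wrapper around a set of migration steps selected by the version span. As a rough illustration of how it is driven, the sketch below runs a read-only check pass and then an upgrade pass through ToolRunner, mirroring main() further down; it is not part of the commit and assumes Migrate's no-argument constructor is accessible from another class.

import org.apache.hadoop.hbase.util.Migrate;
import org.apache.hadoop.util.ToolRunner;

// Illustrative driver only -- the supported entry point is "bin/hbase migrate {check | upgrade}".
public final class RunMigrationSketch {
  public static void main(final String[] args) throws Exception {
    // Dry run first: "check" performs the upgrade checks without modifying hbase.
    int status = ToolRunner.run(new Migrate(), new String[] {"check"});
    if (status == 0) {
      // Checks passed: "upgrade" performs the checks again and then modifies hbase.
      status = ToolRunner.run(new Migrate(), new String[] {"upgrade"});
    }
    System.exit(status);
  }
}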
@@ -136,7 +130,7 @@ public class Migrate extends Configured implements Tool {
     try {
       // Verify file system is up.
       fs = FileSystem.get(conf); // get DFS handle
-      LOG.info("Verifying that file system is available...");
+      LOG.info("Verifying that file system is available..");
       FSUtils.checkFileSystemAvailable(fs);
       return true;
     } catch (IOException e) {
@@ -147,7 +141,8 @@ public class Migrate extends Configured implements Tool {
 
   private boolean notRunning() {
     // Verify HBase is down
-    LOG.info("Verifying that HBase is not running...");
+    LOG.info("Verifying that HBase is not running...." +
+      "Trys ten times to connect to running master");
     try {
       HBaseAdmin.checkHBaseAvailable(conf);
       LOG.fatal("HBase cluster must be off-line.");
@@ -187,7 +182,7 @@ public class Migrate extends Configured implements Tool {
     }
     float version = Float.parseFloat(versionStr);
     if (version == HBASE_0_1_VERSION ||
-        Integer.valueOf(versionStr) < PREVIOUS_VERSION) {
+        Integer.valueOf(versionStr).intValue() < PREVIOUS_VERSION) {
       String msg = "Cannot upgrade from " + versionStr + " to " +
         HConstants.FILE_SYSTEM_VERSION + " you must install hbase-0.2.x, run " +
         "the upgrade tool, reinstall this version and run this utility again." +
@@ -196,7 +191,7 @@ public class Migrate extends Configured implements Tool {
       throw new IOException(msg);
     }
 
-    // insert call to new migration method here.
+    migrateTo5();
 
     if (!readOnly) {
       // Set file system version
@@ -213,37 +208,65 @@ public class Migrate extends Configured implements Tool {
     }
   }
 
-  private FileStatus[] getRootDirFiles() throws IOException {
-    FileStatus[] stats = fs.listStatus(FSUtils.getRootDir(this.conf));
-    if (stats == null || stats.length == 0) {
-      throw new IOException("No files found under root directory " +
-        FSUtils.getRootDir(this.conf).toString());
-    }
-    return stats;
-  }
-
-  private void checkForUnrecoveredLogFiles(FileStatus[] rootFiles)
-  throws IOException {
-    List<String> unrecoveredLogs = new ArrayList<String>();
-    for (int i = 0; i < rootFiles.length; i++) {
-      String name = rootFiles[i].getPath().getName();
-      if (name.startsWith("log_")) {
-        unrecoveredLogs.add(name);
-      }
-    }
-    if (unrecoveredLogs.size() != 0) {
-      throw new IOException("There are " + unrecoveredLogs.size() +
-        " unrecovered region server logs. Please uninstall this version of " +
-        "HBase, re-install the previous version, start your cluster and " +
-        "shut it down cleanly, so that all region server logs are recovered" +
-        " and deleted. Or, if you are sure logs are vestige of old " +
-        "failures in hbase, remove them and then rerun the migration. " +
-        "See 'Redo Logs' in http://wiki.apache.org/hadoop/Hbase/HowToMigrate. " +
-        "Here are the problem log files: " + unrecoveredLogs);
-    }
-  }
+  // Move the fileystem version from 4 to 5.
+  // In here we rewrite the catalog table regions so they keep 10 versions
+  // instead of 1.
+  private void migrateTo5() throws IOException {
+    if (this.readOnly && this.migrationNeeded) {
+      return;
+    }
+    final MetaUtils utils = new MetaUtils(this.conf);
+    try {
+      updateVersions(utils.getRootRegion().getRegionInfo());
+      // Scan the root region
+      utils.scanRootRegion(new MetaUtils.ScannerListener() {
+        public boolean processRow(HRegionInfo info)
+        throws IOException {
+          if (readOnly && !migrationNeeded) {
+            migrationNeeded = true;
+            return false;
+          }
+          updateVersions(utils.getRootRegion(), info);
+          return true;
+        }
+      });
+    } finally {
+      utils.shutdown();
+    }
+  }
+
+  /*
+   * Move from old pre-v5 hregioninfo to current HRegionInfo
+   * Persist back into <code>r</code>
+   * @param mr
+   * @param oldHri
+   */
+  void updateVersions(HRegion mr, HRegionInfo oldHri)
+  throws IOException {
+    if (!updateVersions(oldHri)) {
+      return;
+    }
+    BatchUpdate b = new BatchUpdate(oldHri.getRegionName());
+    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(oldHri));
+    mr.batchUpdate(b);
+    LOG.info("Upped versions on " + oldHri.getRegionNameAsString());
+  }
+
+  /*
+   * @param hri Update versions.
+   * @param true if we changed value
+   */
+  private boolean updateVersions(final HRegionInfo hri) {
+    HColumnDescriptor hcd =
+      hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY);
+    if (hcd.getMaxVersions() == 1) {
+      // Set it to 10, an arbitrary high number
+      hcd.setMaxVersions(10);
+      return true;
+    }
+    return false;
+  }
 
-  @SuppressWarnings("static-access")
   private int parseArgs(String[] args) {
     Options opts = new Options();
     GenericOptionsParser parser =
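The heart of the new migrateTo5() path is the check-then-set in updateVersions(HRegionInfo): the maximum number of versions kept by the catalog column family is raised only while it still sits at the old default of 1, so re-running the migration changes nothing and writes nothing back. Below is a standalone sketch of that pattern only; the class and method names are illustrative and not part of the commit.

import org.apache.hadoop.hbase.HColumnDescriptor;

// Illustrative only. Isolates the idempotent bump that updateVersions(HRegionInfo) performs
// on the catalog family descriptor (hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY)).
final class CatalogVersionsSketch {
  static boolean bumpIfUnmigrated(final HColumnDescriptor catalogFamily) {
    if (catalogFamily.getMaxVersions() == 1) {  // still at the pre-v5 default of one version
      catalogFamily.setMaxVersions(10);         // the arbitrary high limit the migration uses
      return true;                              // changed; the caller persists it via BatchUpdate
    }
    return false;                               // already migrated; skip the write entirely
  }
}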
@@ -263,9 +286,11 @@ public class Migrate extends Configured implements Tool {
   }
 
   private void usage() {
-    System.err.println("Usage: bin/hbase migrate { check | upgrade } [options]\n");
+    System.err.println("Usage: bin/hbase migrate {check | upgrade} [options]");
+    System.err.println();
     System.err.println(" check     perform upgrade checks only.");
-    System.err.println(" upgrade   perform upgrade checks and modify hbase.\n");
+    System.err.println(" upgrade   perform upgrade checks and modify hbase.");
+    System.err.println();
     System.err.println(" Options are:");
     System.err.println("    -conf <configuration file>   specify an application configuration file");
     System.err.println("    -D <property=value>          use value for given property");
@@ -282,9 +307,9 @@ public class Migrate extends Configured implements Tool {
     try {
       status = ToolRunner.run(new Migrate(), args);
     } catch (Exception e) {
-      LOG.error("exiting due to error", e);
+      LOG.error(e);
       status = -1;
     }
     System.exit(status);
   }
 }