HADOOP-2395 Implement "ALTER TABLE ... CHANGE column" operation

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@603304 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2007-12-11 17:07:12 +00:00
parent f9fc02ba8b
commit 29e68d505c
7 changed files with 199 additions and 12 deletions


@@ -15,6 +15,8 @@ Trunk (unreleased changes)
HADOOP-1550 No means of deleting a 'row' (Bryan Duxbury via Stack)
HADOOP-2384 Delete all members of a column family on a specific row
(Bryan Duxbury via Stack)
HADOOP-2395 Implement "ALTER TABLE ... CHANGE column" operation
(Bryan Duxbury via Stack)
OPTIMIZATIONS


@@ -479,6 +479,31 @@ public class HBaseAdmin implements HConstants {
throw RemoteExceptionHandler.decodeRemoteException(e);
}
}
/**
* Modify an existing column family on a table
*
* @param tableName name of table
* @param columnName name of column to be modified
* @param descriptor new column descriptor to use
* @throws IOException
*/
public void modifyColumn(Text tableName, Text columnName,
HColumnDescriptor descriptor)
throws IOException {
if (this.master == null) {
throw new MasterNotRunningException("master has been shut down");
}
checkReservedTableName(tableName);
try {
this.master.modifyColumn(tableName, columnName, descriptor);
} catch (RemoteException e) {
throw RemoteExceptionHandler.decodeRemoteException(e);
}
}
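A minimal client-side sketch of how the new call might be used. The "webtable" and "anchor:" names, the chosen option values, and the surrounding class are illustrative assumptions only; the descriptor constructor mirrors the one used in SchemaModificationCommand.getColumnDescriptor() further down, and the disable/enable bracketing mirrors what the HQL shell's AlterCommand does around a CHANGE.

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.io.Text;

public class ModifyColumnExample {
  // Hypothetical usage of the new HBaseAdmin.modifyColumn() call.
  public static void main(String[] args) throws IOException {
    HBaseAdmin admin = new HBaseAdmin(new HBaseConfiguration());
    Text tableName = new Text("webtable");
    Text columnName = new Text("anchor:");

    // Replacement descriptor for the existing family, here keeping plain
    // defaults except for a higher number of retained versions.
    HColumnDescriptor newDesc = new HColumnDescriptor(columnName, 5,
        HColumnDescriptor.CompressionType.NONE, false,
        HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH, null);

    // The shell disables the table around the schema change and re-enables
    // it afterwards, so the sketch does the same.
    admin.disableTable(tableName);
    admin.modifyColumn(tableName, columnName, newDesc);
    admin.enableTable(tableName);
  }
}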
/**
* Shuts down the HBase instance


@@ -2635,6 +2635,13 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
new AddColumn(tableName, column).process();
}
/** {@inheritDoc} */
public void modifyColumn(Text tableName, Text columnName,
HColumnDescriptor descriptor)
throws IOException {
new ModifyColumn(tableName, columnName, descriptor).process();
}
/** {@inheritDoc} */
public void deleteColumn(Text tableName, Text columnName) throws IOException {
new DeleteColumn(tableName, HStoreKey.extractFamily(columnName)).process();
@@ -3076,6 +3083,41 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
}
}
/** Instantiated to modify an existing column family on a table */
private class ModifyColumn extends ColumnOperation {
private HColumnDescriptor descriptor;
private Text columnName;
ModifyColumn(Text tableName, Text columnName, HColumnDescriptor _descriptor)
throws IOException {
super(tableName);
this.descriptor = _descriptor;
this.columnName = columnName;
}
@Override
protected void postProcessMeta(MetaRegion m, HRegionInterface server)
throws IOException {
for (HRegionInfo i: unservedRegions) {
// get the column families map from the table descriptor
Map<Text, HColumnDescriptor> families = i.getTableDesc().families();
// if the table already has this column, then put the new descriptor
// version.
if (families.get(columnName) != null){
families.put(columnName, descriptor);
updateRegionInfo(server, m.getRegionName(), i);
}
else{ // otherwise, we have an error.
throw new IOException("Column family '" + columnName +
"' doesn't exist, so cannot be modified.");
}
}
}
}
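As a consequence of the check above, a CHANGE against a family that is not in the table descriptor should surface on the client as an IOException. A hedged sketch, continuing the client example above ('nosuchfamily:' is made up, and this assumes the RemoteException decodes back to the original IOException in HBaseAdmin.modifyColumn):

// Hypothetical error path: 'nosuchfamily:' is not a family of 'webtable',
// so ModifyColumn.postProcessMeta() rejects the change on the master.
try {
  admin.modifyColumn(new Text("webtable"), new Text("nosuchfamily:"), newDesc);
} catch (IOException e) {
  // Expected message, per the throw above:
  // "Column family 'nosuchfamily:' doesn't exist, so cannot be modified."
}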
/*
* Managing leases
*/


@@ -60,6 +60,18 @@ public interface HMasterInterface extends VersionedProtocol {
*/
public void addColumn(Text tableName, HColumnDescriptor column) throws IOException;
/**
* Modifies an existing column on the specified table
* @param tableName
* @param columnName name of the column to edit
* @param descriptor new column descriptor
* @throws IOException
*/
public void modifyColumn(Text tableName, Text columnName,
HColumnDescriptor descriptor)
throws IOException;
/**
* Deletes a column from the specified table
* @param tableName


@@ -28,10 +28,14 @@ import java.util.Set;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HConnection;
import org.apache.hadoop.hbase.HConnectionManager;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.BloomFilterDescriptor;
import org.apache.hadoop.hbase.BloomFilterDescriptor.BloomFilterType;
/**
* Alters tables.
*/
@@ -79,8 +83,29 @@ public class AlterCommand extends SchemaModificationCommand {
enableTable(admin, tableName);
break;
case CHANGE:
// Not yet supported
return new ReturnMsg(0, "" + operationType + " is not yet supported.");
disableTable(admin, tableName);
Map.Entry<String, Map<String, Object>> columnEntry =
(Map.Entry<String, Map<String, Object>>)columnSpecMap.entrySet().toArray()[0];
// add the : if there isn't one
Text columnName = new Text(columnEntry.getKey().endsWith(":") ?
columnEntry.getKey() : columnEntry.getKey() + ":");
// get the table descriptor so we can get the old column descriptor
HTableDescriptor tDesc = getTableDescByName(admin, tableName);
HColumnDescriptor oldColumnDesc = tDesc.families().get(columnName);
// combine the options specified in the shell with the options
// from the existing descriptor to produce the new descriptor
columnDesc = getColumnDescriptor(columnName.toString(),
columnEntry.getValue(), oldColumnDesc);
// send the changes out to the master
admin.modifyColumn(new Text(tableName), columnName, columnDesc);
enableTable(admin, tableName);
break;
case NOOP:
return new ReturnMsg(0, "Invalid operation type.");
}
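The CHANGE branch above pulls the single entry out of columnSpecMap and casts it, which is easier to follow with the assumed shape of that map spelled out. A sketch with made-up family name and option values; the real map is built by the HQL parser, which is not part of this diff (uses java.util.HashMap and java.util.Map):

// Assumed shape of columnSpecMap for a statement of the form
// "ALTER TABLE ... CHANGE anchor ...": a single entry keyed by the family
// name as typed in the shell (the trailing ':' is appended later).
Map<String, Object> options = new HashMap<String, Object>();
options.put("MAX_VERSIONS", Integer.valueOf(5));
options.put("IN_MEMORY", Boolean.TRUE);

Map<String, Map<String, Object>> columnSpecMap =
    new HashMap<String, Map<String, Object>>();
columnSpecMap.put("anchor", options);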
@@ -141,4 +166,84 @@ public class AlterCommand extends SchemaModificationCommand {
public CommandType getCommandType() {
return CommandType.DDL;
}
private HTableDescriptor getTableDescByName(HBaseAdmin admin, String tableName)
throws IOException{
HTableDescriptor[] tables = admin.listTables();
for(HTableDescriptor tDesc : tables){
if (tDesc.getName().toString().equals(tableName)) {
return tDesc;
}
}
return null;
}
/**
* Given a column name, column spec, and original descriptor, returns an
* instance of HColumnDescriptor representing the column spec, with any
* unspecified options drawn from the original descriptor as defaults
*/
protected HColumnDescriptor getColumnDescriptor(String column,
Map<String, Object> columnSpec, HColumnDescriptor original)
throws IllegalArgumentException {
initOptions(original);
Set<String> specs = columnSpec.keySet();
for (String spec : specs) {
spec = spec.toUpperCase();
if (spec.equals("MAX_VERSIONS")) {
maxVersions = (Integer) columnSpec.get(spec);
} else if (spec.equals("MAX_LENGTH")) {
maxLength = (Integer) columnSpec.get(spec);
} else if (spec.equals("COMPRESSION")) {
compression = HColumnDescriptor.CompressionType
.valueOf(((String) columnSpec.get(spec)).toUpperCase());
} else if (spec.equals("IN_MEMORY")) {
inMemory = (Boolean) columnSpec.get(spec);
} else if (spec.equals("BLOOMFILTER")) {
bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
.toUpperCase());
} else if (spec.equals("VECTOR_SIZE")) {
vectorSize = (Integer) columnSpec.get(spec);
} else if (spec.equals("NUM_HASH")) {
numHash = (Integer) columnSpec.get(spec);
} else if (spec.equals("NUM_ENTRIES")) {
numEntries = (Integer) columnSpec.get(spec);
} else {
throw new IllegalArgumentException("Invalid option: " + spec);
}
}
// Now we gather all the specified options for this column.
if (bloomFilterType != null) {
if (specs.contains("NUM_ENTRIES")) {
bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, numEntries);
} else {
bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, vectorSize,
numHash);
}
}
column = appendDelimiter(column);
HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
return columnDesc;
}
private void initOptions(HColumnDescriptor original) {
if (original == null) {
initOptions();
return;
}
maxVersions = original.getMaxVersions();
maxLength = original.getMaxValueLength();
compression = original.getCompression();
inMemory = original.isInMemory();
bloomFilterDesc = original.getBloomFilter();
}
}


@@ -116,6 +116,7 @@ public class HelpCommand extends BasicCommand {
"Alter structure of table",
"ALTER TABLE table_name ADD column_spec | "
+ "ADD (column_spec, column_spec, ...) | "
+ "CHANGE column_family column_spec | "
+ "DROP column_family_name | " + "CHANGE column_spec;" });
load.put("EXIT", new String[] { "Exit shell", "EXIT;" });


@@ -33,21 +33,21 @@ import org.apache.hadoop.io.Text;
* Command. Provides utility methods for alteration operations.
*/
public abstract class SchemaModificationCommand extends BasicCommand {
private int maxVersions;
private int maxLength;
private HColumnDescriptor.CompressionType compression;
private boolean inMemory;
private BloomFilterDescriptor bloomFilterDesc;
private BloomFilterType bloomFilterType;
private int vectorSize;
private int numHash;
private int numEntries;
protected int maxVersions;
protected int maxLength;
protected HColumnDescriptor.CompressionType compression;
protected boolean inMemory;
protected BloomFilterDescriptor bloomFilterDesc;
protected BloomFilterType bloomFilterType;
protected int vectorSize;
protected int numHash;
protected int numEntries;
public SchemaModificationCommand(Writer o) {
super(o);
}
private void initOptions() {
protected void initOptions() {
maxVersions = HColumnDescriptor.DEFAULT_N_VERSIONS;
maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;