HBASE-839 Update hadoop libs in hbase; move hbase TRUNK on to an hadoop 0.19.0 RC

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@709203 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-10-30 16:43:07 +00:00
parent ac1074b8ce
commit 5a0a0df8bb
33 changed files with 42 additions and 35 deletions

View File

@@ -82,6 +82,8 @@ Release 0.19.0 - Unreleased
              get Rows using offset and limit (Sishen Freecity via Stack)
    HBASE-817 Hbase/Shell Truncate
    HBASE-949 Add an HBase Manual
+   HBASE-839 Update hadoop libs in hbase; move hbase TRUNK on to an hadoop
+             0.19.0 RC
 NEW FEATURES
    HBASE-875 Use MurmurHash instead of JenkinsHash [in bloomfilters]

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -368,7 +368,7 @@ public class HbaseObjectWritable implements Writable, Configurable {
       instance = Enum.valueOf((Class<? extends Enum>) declaredClass,
         Text.readString(in));
     } else { // Writable
-      Class<?> instanceClass = null;
+      Class instanceClass = null;
       Byte b = in.readByte();
       if (b.byteValue() == NOT_ENCODED) {
         String className = Text.readString(in);
@@ -407,5 +407,4 @@ public class HbaseObjectWritable implements Writable, Configurable {
   public Configuration getConf() {
     return this.conf;
   }
-
 }
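For readers following the HbaseObjectWritable hunks above: the class writes a one-byte code for types it knows about and falls back to a full class-name string tagged with a NOT_ENCODED sentinel. A minimal sketch of that decode branch, with the sentinel and registry declared locally here as hypothetical stand-ins (the real class keeps equivalents as statics and uses Text.readString rather than readUTF):

    import java.io.DataInput;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    class ClassCodeSketch {
      // Hypothetical stand-ins for HbaseObjectWritable's own statics.
      static final byte NOT_ENCODED = 0;
      static final Map<Byte, Class<?>> CODE_TO_CLASS = new HashMap<Byte, Class<?>>();

      // A class was serialized either as a one-byte code (common case) or,
      // when the writer had no code for it, as NOT_ENCODED plus the class name.
      static Class<?> readClass(DataInput in)
          throws IOException, ClassNotFoundException {
        Byte b = Byte.valueOf(in.readByte());
        if (b.byteValue() == NOT_ENCODED) {
          String className = in.readUTF();   // HBase uses Text.readString(in)
          return Class.forName(className);
        }
        return CODE_TO_CLASS.get(b);
      }
    }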

View File

@@ -90,7 +90,8 @@ public class HbaseRPC {
   /** A method invocation, including the method name and its parameters.*/
   private static class Invocation implements Writable, Configurable {
-    // Here we maintain two static maps of method names to code and vice versa.
+    // Here, for hbase, we maintain two static maps of method names to code and
+    // vice versa.
     private static final Map<Byte, String> CODE_TO_METHODNAME =
       new HashMap<Byte, String>();
     private static final Map<String, Byte> METHODNAME_TO_CODE =
@@ -104,6 +105,7 @@ public class HbaseRPC {
       code = addToMap(HMasterRegionInterface.class, code);
       code = addToMap(TransactionalRegionInterface.class, code);
     }
+    // End of hbase modifications.
     private String methodName;
     @SuppressWarnings("unchecked")
@@ -160,7 +162,7 @@ public class HbaseRPC {
     @Override
     public String toString() {
-      StringBuffer buffer = new StringBuffer();
+      StringBuilder buffer = new StringBuilder(256);
       buffer.append(methodName);
       buffer.append("(");
       for (int i = 0; i < parameters.length; i++) {
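The StringBuffer-to-StringBuilder swap above is a small hot-path cleanup: StringBuilder drops StringBuffer's per-call synchronization, which is pointless for a method-local buffer, and presizing to 256 characters avoids a few array copies for typical signatures. The surrounding toString(), roughly (a sketch, not the exact HBase source):

    public String toString() {
      StringBuilder buffer = new StringBuilder(256);
      buffer.append(methodName);
      buffer.append("(");
      for (int i = 0; i < parameters.length; i++) {
        if (i != 0) {
          buffer.append(", ");
        }
        buffer.append(parameters[i]);
      }
      buffer.append(")");
      return buffer.toString();
    }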
@@ -180,6 +182,7 @@ public class HbaseRPC {
       return this.conf;
     }
+    // Hbase additions.
     private static void addToMap(final String name, final byte code) {
       if (METHODNAME_TO_CODE.containsKey(name)) {
         return;
@@ -227,6 +230,7 @@ public class HbaseRPC {
       }
       out.writeByte(code.byteValue());
     }
+    // End of hbase additions.
   }
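The additions bracketed by the "Hbase additions" comments feed the two method-name maps declared earlier, so an RPC invocation can ship a one-byte method code instead of a full string. A minimal sketch of the registration side, assuming a simple incrementing code (the real addToMap in HbaseRPC registers every method of a given interface):

    import java.util.HashMap;
    import java.util.Map;

    class MethodNameCodes {
      private static final Map<Byte, String> CODE_TO_METHODNAME =
        new HashMap<Byte, String>();
      private static final Map<String, Byte> METHODNAME_TO_CODE =
        new HashMap<String, Byte>();

      // Register a name under the next free byte code, keeping both maps in
      // sync so the name is written as one byte and resolved again on read.
      static byte addToMap(final String name, final byte code) {
        if (METHODNAME_TO_CODE.containsKey(name)) {
          return code;                          // already registered
        }
        METHODNAME_TO_CODE.put(name, Byte.valueOf(code));
        CODE_TO_METHODNAME.put(Byte.valueOf(code), name);
        return (byte) (code + 1);
      }
    }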
@@ -250,6 +254,7 @@ /* Cache a client using its socket factory as the hash key */
       // per-job, we choose (a).
       Client client = clients.get(factory);
       if (client == null) {
+        // Make an hbase client instead of hadoop Client.
         client = new HBaseClient(HbaseObjectWritable.class, conf, factory);
         clients.put(factory, client);
       } else {
@@ -310,11 +315,17 @@ public class HbaseRPC {
     public Object invoke(@SuppressWarnings("unused") Object proxy,
         Method method, Object[] args)
       throws Throwable {
-      long startTime = System.currentTimeMillis();
+      final boolean logDebug = LOG.isDebugEnabled();
+      long startTime = 0;
+      if (logDebug) {
+        startTime = System.currentTimeMillis();
+      }
       HbaseObjectWritable value = (HbaseObjectWritable)
         client.call(new Invocation(method, args), address, ticket);
-      long callTime = System.currentTimeMillis() - startTime;
-      LOG.debug("Call: " + method.getName() + " " + callTime);
+      if (logDebug) {
+        long callTime = System.currentTimeMillis() - startTime;
+        LOG.debug("Call: " + method.getName() + " " + callTime);
+      }
       return value.get();
     }
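The rewrite of invoke() above applies the standard guarded-logging pattern: LOG.isDebugEnabled() is checked once up front, so the two currentTimeMillis() calls and the debug string concatenation are only paid for when DEBUG logging is actually on. Stripped to its essentials (illustrative only; call() stands in for the real client.call round trip):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class GuardedLoggingSketch {
      private static final Log LOG = LogFactory.getLog(GuardedLoggingSketch.class);

      Object timedCall(String methodName) throws Exception {
        final boolean logDebug = LOG.isDebugEnabled();
        long startTime = 0;
        if (logDebug) {
          startTime = System.currentTimeMillis();  // only measured when needed
        }
        Object value = call();                     // stand-in for the RPC call
        if (logDebug) {
          LOG.debug("Call: " + methodName + " " +
            (System.currentTimeMillis() - startTime));
        }
        return value;
      }

      private Object call() { return new Object(); } // hypothetical
    }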
@@ -391,6 +402,7 @@ public class HbaseRPC {
       Configuration conf,
       int maxAttempts
       ) throws IOException {
+    // HBase does limited number of reconnects which is different from hadoop.
     int reconnectAttempts = 0;
     while (true) {
       try {
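The comment added above flags a deliberate divergence from stock hadoop RPC: this waitForProxy gives up after the maxAttempts passed in rather than retrying indefinitely. A hedged sketch of such a bounded loop (connect() and the one-second backoff are illustrative, not the exact HBase code):

    import java.io.IOException;

    class BoundedRetrySketch {
      static Object waitForProxy(int maxAttempts) throws IOException {
        int reconnectAttempts = 0;
        while (true) {
          try {
            return connect();                     // hypothetical connect attempt
          } catch (IOException e) {
            if (++reconnectAttempts >= maxAttempts) {
              throw e;                            // fail fast at the limit
            }
            try {
              Thread.sleep(1000);                 // illustrative backoff
            } catch (InterruptedException ie) {
              Thread.currentThread().interrupt();
              throw new IOException("interrupted while retrying");
            }
          }
        }
      }

      private static Object connect() throws IOException { return new Object(); }
    }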

View File

@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
@@ -99,7 +100,7 @@ implements TableMap<ImmutableBytesWritable, RowResult>, Tool {
       ImmutableBytesWritable.class, RowResult.class, c);
     c.setReducerClass(IdentityReducer.class);
     // First arg is the output directory.
-    c.setOutputPath(new Path(args[0]));
+    FileOutputFormat.setOutputPath(c, new Path(args[0]));
     return c;
   }
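This hunk tracks an API move in hadoop 0.19: the long-deprecated JobConf.setOutputPath is gone, and the output directory is now configured through the static FileOutputFormat.setOutputPath, which is why the new import appears in the previous hunk. Usage, for reference:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;

    class OutputPathSketch {
      // First arg is the output directory, as in the job setup above.
      static JobConf configureOutput(JobConf c, String[] args) {
        FileOutputFormat.setOutputPath(c, new Path(args[0]));
        return c;
      }
    }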

View File

@@ -34,8 +34,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.DistributedFileSystem;
-import org.apache.hadoop.dfs.FSConstants;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
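This and the remaining import hunks below are the same mechanical change: hadoop 0.19 renamed the HDFS package from org.apache.hadoop.dfs to org.apache.hadoop.hdfs, with FSConstants landing under org.apache.hadoop.hdfs.protocol and, as the later hunks show, MiniDFSCluster under org.apache.hadoop.hdfs. A small sketch touching the relocated types (the safe-mode check is just one example of exercising the 0.19 API, not code from this commit):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.FSConstants;

    class HdfsPackageSketch {
      // Returns whether an HDFS filesystem is currently in safe mode.
      static boolean inSafeMode(FileSystem fs) throws IOException {
        if (fs instanceof DistributedFileSystem) {
          DistributedFileSystem dfs = (DistributedFileSystem) fs;
          return dfs.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_GET);
        }
        return false;                // not HDFS, so no safe mode to be in
      }
    }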

View File

@@ -28,7 +28,7 @@ import java.net.URISyntaxException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.ipc;
 import javax.net.SocketFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
 /**
  * Subclass of hadoop's Client just so we can make some methods accessible
@@ -31,17 +32,10 @@ public class HBaseClient extends Client {
    * @param conf
    * @param factory
    */
-  public HBaseClient(Class valueClass, Configuration conf, SocketFactory factory) {
+  public HBaseClient(Class<? extends Writable> valueClass, Configuration conf,
+      SocketFactory factory) {
     super(valueClass, conf, factory);
   }
-  /**
-   * @param valueClass
-   * @param conf
-   */
-  public HBaseClient(Class<?> valueClass, Configuration conf) {
-    super(valueClass, conf);
-  }
   @Override
   public void incCount() {
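The constructor change above narrows a raw Class parameter to Class<? extends Writable> (hence the new org.apache.hadoop.io.Writable import), so callers can no longer hand the client a value class the RPC layer could not deserialize; the now-unused two-argument constructor is dropped. A minimal sketch of the benefit, with illustrative names:

    import org.apache.hadoop.io.Writable;

    class ValueClassSketch {
      private final Class<? extends Writable> valueClass;

      ValueClassSketch(Class<? extends Writable> valueClass) {
        this.valueClass = valueClass;  // compiler guarantees it is a Writable
      }

      Writable newValue() throws InstantiationException, IllegalAccessException {
        return valueClass.newInstance(); // no cast needed
      }
    }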

View File

@@ -24,7 +24,7 @@ import java.io.PrintWriter;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.ReflectionUtils;

View File

@@ -29,7 +29,7 @@ import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HColumnDescriptor.CompressionType;

View File

@@ -31,7 +31,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.HBaseAdmin;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.regionserver;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;

View File

@@ -21,13 +21,12 @@ package org.apache.hadoop.hbase.regionserver;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 /**
  * Test the functionality of deleteFamily.

View File

@@ -25,7 +25,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.util.Bytes;

View File

@@ -25,7 +25,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;

View File

@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.util.TreeMap;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.SequenceFile.Reader;

View File

@@ -25,7 +25,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.TreeMap;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HTableDescriptor;

View File

@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;

View File

@@ -24,7 +24,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.TreeMap;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hbase.HBaseTestCase;

View File

@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;

View File

@@ -24,7 +24,7 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HColumnDescriptor;