HADOOP-2533 Scanning, just creating MapWritable in next consumes >20% CPU

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@611629 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-01-13 20:10:22 +00:00
parent d7de304808
commit f6f56a1d72
11 changed files with 251 additions and 45 deletions
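
Why this helps: every scanner next() RPC allocates a fresh result map. Hadoop's MapWritable extends AbstractMapWritable, whose constructor builds per-instance class/id lookup tables and primes them with the predefined Writable classes, so in a tight scan loop that construction alone profiled at over 20% of CPU. The HbaseMapWritable added in this commit wraps a single HashMap and keeps its type-code tables in static fields, primed once per JVM. A rough micro-benchmark sketch, not part of the commit (class names from this diff; iteration count arbitrary):

// Compares construction cost of the two map types. Timings are
// illustrative only; a JIT-warmed harness would be needed for real numbers.
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.MapWritable;

public class MapWritableCost {
  public static void main(String[] args) {
    final int n = 1000000;
    long t = System.currentTimeMillis();
    for (int i = 0; i < n; i++) {
      new MapWritable();       // also builds per-instance id<->class maps
    }
    System.out.println("MapWritable:      " +
      (System.currentTimeMillis() - t) + "ms");
    t = System.currentTimeMillis();
    for (int i = 0; i < n; i++) {
      new HbaseMapWritable();  // one HashMap; code tables are static
    }
    System.out.println("HbaseMapWritable: " +
      (System.currentTimeMillis() - t) + "ms");
  }
}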

CHANGES.txt

@@ -33,6 +33,11 @@ Trunk (unreleased changes)
    HADOOP-2479 Save on number of Text object creations
    HADOOP-2485 Make mapfile index interval configurable (Set default to 32
                instead of 128)
+   HADOOP-2553 Don't make Long objects calculating hbase type hash codes
+   HADOOP-2377 Holding open MapFile.Readers is expensive, so use less of them
+   HADOOP-2407 Keeping MapFile.Reader open is expensive: Part 2
+   HADOOP-2533 Performance: Scanning, just creating MapWritable in next
+               consumes >20% CPU
 
  BUG FIXES
    HADOOP-2059 In tests, exceptions in min dfs shutdown should not fail test
@@ -104,8 +109,7 @@ Trunk (unreleased changes)
                deleted
    HADOOP-2468 TestRegionServerExit failed in Hadoop-Nightly #338
    HADOOP-2467 scanner truncates resultset when > 1 column families
-   HADOOP-2503 REST Insert / Select encoding issue
-               (Bryan Duxbury via Stack)
+   HADOOP-2503 REST Insert / Select encoding issue (Bryan Duxbury via Stack)
    HADOOP-2505 formatter classes missing apache license
    HADOOP-2504 REST servlet method for deleting a scanner was not properly
                mapped (Bryan Duxbury via Stack)
@@ -148,11 +152,9 @@ Trunk (unreleased changes)
    HADOOP-2299 Support inclusive scans (Bryan Duxbury via Stack)
    HADOOP-2333 Client side retries happen at the wrong level
    HADOOP-2357 Compaction cleanup; less deleting + prevent possible file leaks
-   HADOOP-2377 Holding open MapFile.Readers is expensive, so use less of them
    HADOOP-2392 TestRegionServerExit has new failure mode since HADOOP-2338
    HADOOP-2370 Allow column families with an unlimited number of versions
                (Edward Yoon via Stack)
-   HADOOP-2407 Keeping MapFile.Reader open is expensive: Part 2
    HADOOP-2047 Add an '--master=X' and '--html' command-line parameters to shell
                (Edward Yoon via Stack)
    HADOOP-2351 If select command returns no result, it doesn't need to show the
@@ -167,7 +169,6 @@ Trunk (unreleased changes)
                (Edward Yoon via Stack)
    HADOOP-2450 Show version (and svn revision) in hbase web ui
    HADOOP-2472 Range selection using filter (Edward Yoon via Stack)
-   HADOOP-2553 Don't make Long objects calculating hbase type hash codes
    HADOOP-2548 Make TableMap and TableReduce generic
                (Frederik Hedberg via Stack)
    HADOOP-2557 Shell count function (Edward Yoon via Stack)

src/java/org/apache/hadoop/hbase/HBaseAdmin.java

@@ -28,7 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -190,7 +190,7 @@ public class HBaseAdmin implements HConstants {
         scannerId =
           server.openScanner(firstMetaServer.getRegionInfo().getRegionName(),
             COL_REGIONINFO_ARRAY, tableName, System.currentTimeMillis(), null);
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           break;
         }
@@ -274,7 +274,7 @@ public class HBaseAdmin implements HConstants {
     boolean isenabled = false;
 
     while (true) {
-      MapWritable values = server.next(scannerId);
+      HbaseMapWritable values = server.next(scannerId);
       if (values == null || values.size() == 0) {
         if (valuesfound == 0) {
           throw new NoSuchElementException(
@@ -375,7 +375,7 @@ public class HBaseAdmin implements HConstants {
     boolean disabled = false;
 
     while (true) {
-      MapWritable values = server.next(scannerId);
+      HbaseMapWritable values = server.next(scannerId);
       if (values == null || values.size() == 0) {
         if (valuesfound == 0) {
           throw new NoSuchElementException("table " + tableName + " not found");

src/java/org/apache/hadoop/hbase/HConnectionManager.java

@@ -36,7 +36,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.HbaseRPC;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -259,7 +259,7 @@ public class HConnectionManager implements HConstants {
           null);
 
       while (true) {
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           break;
         }
@@ -715,7 +715,7 @@ public class HConnectionManager implements HConstants {
           COLUMN_FAMILY_ARRAY, tableName, System.currentTimeMillis(), null);
 
       while (true) {
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           if (servers.size() == 0) {
             // If we didn't find any servers then the table does not exist

src/java/org/apache/hadoop/hbase/HMaster.java

@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.InfoServer;
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -224,7 +224,7 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
     int numberOfRegionsFound = 0;
     while (true) {
       SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-      MapWritable values = regionServer.next(scannerId);
+      HbaseMapWritable values = regionServer.next(scannerId);
       if (values == null || values.size() == 0) {
         break;
       }
@@ -1185,6 +1185,9 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
     if (LOG.isDebugEnabled()) {
       LOG.debug("Started service threads");
     }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Started service threads");
+    }
   }
 
   /*
@@ -1262,7 +1265,7 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
 
   /** {@inheritDoc} */
   @SuppressWarnings("unused")
-  public MapWritable regionServerStartup(HServerInfo serverInfo)
+  public HbaseMapWritable regionServerStartup(HServerInfo serverInfo)
   throws IOException {
 
     String s = serverInfo.getServerAddress().toString().trim();
@@ -1315,12 +1318,12 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
    * @return Subset of configuration to pass initializing regionservers: e.g.
    * the filesystem to use and root directory to use.
    */
-  protected MapWritable createConfigurationSubset() {
-    MapWritable mw = addConfig(new MapWritable(), HConstants.HBASE_DIR);
+  protected HbaseMapWritable createConfigurationSubset() {
+    HbaseMapWritable mw = addConfig(new HbaseMapWritable(), HConstants.HBASE_DIR);
     return addConfig(mw, "fs.default.name");
   }
 
-  private MapWritable addConfig(final MapWritable mw, final String key) {
+  private HbaseMapWritable addConfig(final HbaseMapWritable mw, final String key) {
     mw.put(new Text(key), new Text(this.conf.get(key)));
     return mw;
   }
@@ -1992,7 +1995,7 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
 
     try {
       while (true) {
-        MapWritable values = null;
+        HbaseMapWritable values = null;
         try {
           values = server.next(scannerId);
         } catch (IOException e) {
@@ -2588,7 +2591,7 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
     long scannerid = server.openScanner(metaRegionName, COL_REGIONINFO_ARRAY,
         tableName, System.currentTimeMillis(), null);
     try {
-      MapWritable data = server.next(scannerid);
+      HbaseMapWritable data = server.next(scannerid);
 
       // Test data and that the row for the data is for our table. If table
       // does not exist, scanner will return row after where our table would
@@ -2743,7 +2746,7 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
         String serverName = null;
         long startCode = -1L;
 
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if(values == null || values.size() == 0) {
           break;
         }

src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java

@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.ipc.VersionedProtocol;
 
 /**
@@ -39,7 +39,7 @@ public interface HMasterRegionInterface extends VersionedProtocol {
    * @return Configuration for the regionserver to use: e.g. filesystem,
    * hbase rootdir, etc.
    */
-  public MapWritable regionServerStartup(HServerInfo info) throws IOException;
+  public HbaseMapWritable regionServerStartup(HServerInfo info) throws IOException;
 
   /**
    * Called to renew lease, tell master what the region server is doing and to

src/java/org/apache/hadoop/hbase/HRegionInterface.java

@@ -24,7 +24,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.VersionedProtocol;
 
@@ -96,7 +96,7 @@ public interface HRegionInterface extends VersionedProtocol {
    * @return map of values
    * @throws IOException
    */
-  public MapWritable getRow(final Text regionName, final Text row)
+  public HbaseMapWritable getRow(final Text regionName, final Text row)
   throws IOException;
 
   /**
@@ -107,7 +107,7 @@ public interface HRegionInterface extends VersionedProtocol {
    * @return map of values
    * @throws IOException
    */
-  public MapWritable getRow(final Text regionName, final Text row, final long ts)
+  public HbaseMapWritable getRow(final Text regionName, final Text row, final long ts)
   throws IOException;
 
@@ -192,7 +192,7 @@ public interface HRegionInterface extends VersionedProtocol {
    * @return map of values
    * @throws IOException
    */
-  public MapWritable next(long scannerId) throws IOException;
+  public HbaseMapWritable next(long scannerId) throws IOException;
 
   /**
    * Close a scanner
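
Every scanner consumer in this commit follows the same call pattern against this interface; only the return type changed. A condensed sketch of that loop, not from the commit (error handling trimmed; the close(long) signature is assumed from the scanner section of this interface):

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class ScanLoop {
  // Opens a scanner on a region, drains it with next(), then closes it.
  static void drain(HRegionInterface server, Text region, Text[] columns,
      Text startRow) throws IOException {
    long scannerId = server.openScanner(region, columns, startRow,
      System.currentTimeMillis(), null);
    try {
      while (true) {
        HbaseMapWritable values = server.next(scannerId);
        if (values == null || values.size() == 0) {
          break; // scanner exhausted
        }
        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
          HStoreKey key = (HStoreKey) e.getKey();
          // ... consume key.getRow(), key.getColumn(), e.getValue() ...
        }
      }
    } finally {
      server.close(scannerId);
    }
  }
}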

src/java/org/apache/hadoop/hbase/HRegionServer.java

@@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.util.InfoServer;
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.Server;
@@ -909,7 +909,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
    * Run init. Sets up hlog and starts up all server threads.
    * @param c Extra configuration.
    */
-  private void init(final MapWritable c) throws IOException {
+  private void init(final HbaseMapWritable c) throws IOException {
     try {
       for (Map.Entry<Writable, Writable> e: c.entrySet()) {
         String key = e.getKey().toString();
@@ -1059,7 +1059,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
    * Let the master know we're here
    * Run initialization using parameters passed us by the master.
    */
-  private MapWritable reportForDuty() throws IOException {
+  private HbaseMapWritable reportForDuty() throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Telling master at " +
         conf.get(MASTER_ADDRESS) + " that we are up");
@@ -1069,7 +1069,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
       HMasterRegionInterface.class, HMasterRegionInterface.versionID,
       new HServerAddress(conf.get(MASTER_ADDRESS)).getInetSocketAddress(),
       this.conf);
-    MapWritable result = null;
+    HbaseMapWritable result = null;
     long lastMsg = 0;
     while(!stopRequested.get()) {
       try {
@@ -1375,20 +1375,20 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   }
 
   /** {@inheritDoc} */
-  public MapWritable getRow(final Text regionName, final Text row)
+  public HbaseMapWritable getRow(final Text regionName, final Text row)
   throws IOException {
     return getRow(regionName, row, HConstants.LATEST_TIMESTAMP);
   }
 
   /** {@inheritDoc} */
-  public MapWritable getRow(final Text regionName, final Text row, final long ts)
+  public HbaseMapWritable getRow(final Text regionName, final Text row, final long ts)
   throws IOException {
     checkOpen();
     requestCount.incrementAndGet();
     try {
       HRegion region = getRegion(regionName);
-      MapWritable result = new MapWritable();
+      HbaseMapWritable result = new HbaseMapWritable();
       Map<Text, byte[]> map = region.getFull(row, ts);
       for (Map.Entry<Text, byte []> es: map.entrySet()) {
         result.put(new HStoreKey(row, es.getKey()),
@@ -1404,7 +1404,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
 
   /** {@inheritDoc} */
-  public MapWritable next(final long scannerId) throws IOException {
+  public HbaseMapWritable next(final long scannerId) throws IOException {
 
     checkOpen();
     requestCount.incrementAndGet();
@@ -1417,7 +1417,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
       this.leases.renewLease(scannerId, scannerId);
 
       // Collect values to be returned here
-      MapWritable values = new MapWritable();
+      HbaseMapWritable values = new HbaseMapWritable();
       HStoreKey key = new HStoreKey();
       TreeMap<Text, byte []> results = new TreeMap<Text, byte []>();
       while (s.next(key, results)) {
@@ -1445,7 +1445,6 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   /** {@inheritDoc} */
   public void batchUpdate(Text regionName, long timestamp, BatchUpdate b)
   throws IOException {
-
     checkOpen();
     this.requestCount.incrementAndGet();
     HRegion region = getRegion(regionName);

src/java/org/apache/hadoop/hbase/HTable.java

@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -366,7 +366,7 @@ public class HTable implements HConstants {
    */
   public SortedMap<Text, byte[]> getRow(Text row, long ts) throws IOException {
     checkClosed();
-    MapWritable value = null;
+    HbaseMapWritable value = null;
     for (int tries = 0; tries < numRetries; tries++) {
       HRegionLocation r = getRegionLocation(row);
       HRegionInterface server =
@@ -1063,7 +1063,7 @@ public class HTable implements HConstants {
       if (this.closed) {
        return false;
       }
-      MapWritable values = null;
+      HbaseMapWritable values = null;
       // Clear the results so we don't inherit any values from any previous
       // calls to next.
       results.clear();

src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java

@@ -0,0 +1,204 @@
/**
 * Copyright 2008 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * A Writable Map.
 * Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
 * if passed a Writable it has not already been told about. It's also been
 * primed with hbase Writables.
 */
public class HbaseMapWritable implements Map<Writable, Writable>, Writable,
    Configurable {

  private AtomicReference<Configuration> conf =
    new AtomicReference<Configuration>();

  // Static maps of code to class and vice versa. Includes types used in hbase
  // only.
  static final Map<Byte, Class<? extends Writable>> CODE_TO_CLASS =
    new HashMap<Byte, Class<? extends Writable>>();
  static final Map<Class<? extends Writable>, Byte> CLASS_TO_CODE =
    new HashMap<Class<? extends Writable>, Byte>();

  static {
    byte code = 0;
    addToMap(HStoreKey.class, code++);
    addToMap(ImmutableBytesWritable.class, code++);
    addToMap(Text.class, code++);
  }

  @SuppressWarnings("boxing")
  private static void addToMap(final Class<? extends Writable> clazz,
      final byte code) {
    CLASS_TO_CODE.put(clazz, code);
    CODE_TO_CLASS.put(code, clazz);
  }

  private Map<Writable, Writable> instance;

  /** Default constructor. */
  public HbaseMapWritable() {
    super();
    this.instance = new HashMap<Writable, Writable>();
  }

  /** @return the conf */
  public Configuration getConf() {
    return conf.get();
  }

  /** @param conf the conf to set */
  public void setConf(Configuration conf) {
    this.conf.set(conf);
  }

  /** {@inheritDoc} */
  public void clear() {
    instance.clear();
  }

  /** {@inheritDoc} */
  public boolean containsKey(Object key) {
    return instance.containsKey(key);
  }

  /** {@inheritDoc} */
  public boolean containsValue(Object value) {
    return instance.containsValue(value);
  }

  /** {@inheritDoc} */
  public Set<Map.Entry<Writable, Writable>> entrySet() {
    return instance.entrySet();
  }

  /** {@inheritDoc} */
  public Writable get(Object key) {
    return instance.get(key);
  }

  /** {@inheritDoc} */
  public boolean isEmpty() {
    return instance.isEmpty();
  }

  /** {@inheritDoc} */
  public Set<Writable> keySet() {
    return instance.keySet();
  }

  /** {@inheritDoc} */
  @SuppressWarnings("unchecked")
  public Writable put(Writable key, Writable value) {
    return instance.put(key, value);
  }

  /** {@inheritDoc} */
  public void putAll(Map<? extends Writable, ? extends Writable> t) {
    for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
      instance.put(e.getKey(), e.getValue());
    }
  }

  /** {@inheritDoc} */
  public Writable remove(Object key) {
    return instance.remove(key);
  }

  /** {@inheritDoc} */
  public int size() {
    return instance.size();
  }

  /** {@inheritDoc} */
  public Collection<Writable> values() {
    return instance.values();
  }

  // Writable

  /** @return the Class class for the specified id */
  @SuppressWarnings({ "unchecked", "boxing" })
  protected Class<?> getClass(byte id) {
    return CODE_TO_CLASS.get(id);
  }

  /** @return the id for the specified Class */
  @SuppressWarnings({ "unchecked", "boxing" })
  protected byte getId(Class<?> clazz) {
    Byte b = CLASS_TO_CODE.get(clazz);
    if (b == null) {
      throw new NullPointerException("Nothing for : " + clazz);
    }
    return b;
  }

  public void write(DataOutput out) throws IOException {
    // Write out the number of entries in the map
    out.writeInt(instance.size());
    // Then write out each key/value pair
    for (Map.Entry<Writable, Writable> e: instance.entrySet()) {
      out.writeByte(getId(e.getKey().getClass()));
      e.getKey().write(out);
      out.writeByte(getId(e.getValue().getClass()));
      e.getValue().write(out);
    }
  }

  public void readFields(DataInput in) throws IOException {
    // First clear the map. Otherwise we will just accumulate
    // entries every time this method is called.
    this.instance.clear();
    // Read the number of entries in the map
    int entries = in.readInt();
    // Then read each key/value pair
    for (int i = 0; i < entries; i++) {
      Writable key = (Writable) ReflectionUtils.newInstance(getClass(
        in.readByte()), getConf());
      key.readFields(in);
      Writable value = (Writable) ReflectionUtils.newInstance(getClass(
        in.readByte()), getConf());
      value.readFields(in);
      instance.put(key, value);
    }
  }
}
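
The class is primed with exactly three types (HStoreKey, ImmutableBytesWritable, Text); getId() throws for anything else. That is the "dumb" trade-off the javadoc names: no per-instance bookkeeping, one byte of type code before each key and each value on the wire. A minimal round-trip sketch, not part of the commit, using Hadoop's DataOutputBuffer and DataInputBuffer:

import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class HbaseMapWritableRoundTrip {
  public static void main(String[] args) throws Exception {
    HbaseMapWritable map = new HbaseMapWritable();
    map.put(new Text("info:server"),
      new ImmutableBytesWritable("10.0.0.1:60020".getBytes()));

    // Serialize: writeInt(size), then per entry a code byte + key,
    // a code byte + value.
    DataOutputBuffer out = new DataOutputBuffer();
    map.write(out);

    // Deserialize into a fresh instance via the static code table.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    HbaseMapWritable copy = new HbaseMapWritable();
    copy.readFields(in);
    System.out.println(copy.size());  // prints 1
  }
}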

src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java

@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -91,7 +91,7 @@ public class HbaseObjectWritable implements Writable, Configurable {
     // Hadoop types
     addToMap(Text.class, code++);
     addToMap(Writable.class, code++);
-    addToMap(MapWritable.class, code++);
+    addToMap(HbaseMapWritable.class, code++);
     addToMap(NullInstance.class, code++);
     try {
       addToMap(Class.forName("[Lorg.apache.hadoop.io.Text;"), code++);

src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java

@@ -38,10 +38,9 @@ import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
@@ -391,7 +390,7 @@ public class DisabledTestScanner2 extends HBaseClusterTestCase {
         System.currentTimeMillis(), null);
     while (true) {
       TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-      MapWritable values = regionServer.next(scannerId);
+      HbaseMapWritable values = regionServer.next(scannerId);
       if (values == null || values.size() == 0) {
         break;
       }
} }