HBASE-403 Fix build after move of hbase in svn

Part 1: jar and test targets work, as do javacc and clean.
TODO: The package target still needs cleanup.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@618453 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-02-04 21:48:13 +00:00
parent 7946db69f7
commit c6b29235e7
27 changed files with 129 additions and 201 deletions
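For reference, a sketch of how the targets reported working might be exercised from the contrib directory (assuming a stock Ant install; per build.xml below, the javacc target only runs when javacc.home is set, and the path used here is just a placeholder):

  ant clean
  ant jar
  ant test
  ant -Djavacc.home=/path/to/javacc javacc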


@@ -41,20 +41,16 @@ the generated java classes and the web.xml. To run, do following:
-->
<project name="build.hbase.jsp" default="jspc">
<property name="lib.dir" value="${basedir}/lib" />
<property name="hadoop.root" location="${basedir}/../../../"/>
<property name="src.webapps" value="${basedir}/src/webapps" />
<property name="generated.webapps.src"
value="${basedir}/src/java"/>
<target name="jspc" >
<path id="jspc.classpath">
<fileset dir="${lib.dir}">
<include name="commons-el*jar" />
</fileset>
<fileset dir="${hadoop.root}/lib/jetty-ext/">
<fileset dir="${basedir}/lib/jetty-ext/">
<include name="*jar" />
</fileset>
<fileset dir="${hadoop.root}/lib/">
<fileset dir="${basedir}/lib/">
<include name="servlet-api*jar" />
<include name="commons-logging*jar" />
</fileset>

build.xml

@@ -18,18 +18,60 @@
-->
<project name="hbase" default="jar">
<import file="../build-contrib.xml"/>
<!-- Load all the default properties, and any the user wants -->
<!-- to contribute (without having to type -D or edit this file -->
<property file="${user.home}/${name}.build.properties" />
<property file="${basedir}/build.properties" />
<property name="build.webapps" value="${build.dir}/webapps"/>
<property name="build.lib" value="${build.dir}/lib"/>
<property name="build.conf" value="${build.dir}/conf"/>
<property name="build.bin" value="${build.dir}/bin"/>
<property name="src.webapps" value="${basedir}/src/webapps" />
<property name="src.dir" location="${basedir}/src/java"/>
<property name="src.test" location="${basedir}/src/test"/>
<property name="src.examples" location="${basedir}/src/examples"/>
<property name="src.webapps" location="${basedir}/src/webapps"/>
<property name="test.output" value="no"/>
<property name="test.timeout" value="900000"/>
<property name="build.dir" location="${basedir}/build"/>
<property name="build.bin" location="${build.dir}/bin"/>
<property name="build.conf" location="${build.dir}/conf"/>
<property name="build.webapps" location="${build.dir}/webpps"/>
<property name="build.lib" location="${build.dir}/lib"/>
<property name="build.classes" location="${build.dir}/classes"/>
<property name="build.test" location="${build.dir}/test"/>
<property name="build.examples" location="${build.dir}/examples"/>
<property name="test.build.dir" value="${build.dir}/test"/>
<property name="test.log.dir" value="${test.build.dir}/logs"/>
<property name="test.junit.output.format" value="plain"/>
<!-- all jars together -->
<property name="javac.deprecation" value="off"/>
<property name="javac.debug" value="on"/>
<property name="javadoc.link"
value="http://java.sun.com/j2se/1.5.0/docs/api/"/>
<property name="build.encoding" value="ISO-8859-1"/>
<!-- the normal classpath -->
<fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar"/>
<path id="classpath">
<pathelement location="${build.classes}"/>
<fileset refid="lib.jars"/>
<fileset dir="${basedir}/lib/jetty-ext/">
<include name="*jar" />
</fileset>
</path>
<target name="init">
<antcall target="hadoopbuildcontrib.init"/>
<mkdir dir="${build.dir}"/>
<mkdir dir="${build.classes}"/>
<mkdir dir="${build.test}"/>
<mkdir dir="${build.examples}"/>
<!--Version is set only if called from hadoop build.xml. Set a default-->
<condition property="version" value="0.15.0-dev">
<condition property="version" value="0.1.0-dev">
<not>
<isset property="version" />
</not>
@@ -63,8 +105,8 @@
</target>
<target name="javacc" if="javacc.home">
<echo message="javacc.home: ${javacc.home}"/>
<property name="hql.src.dir"
<echo message="javacc.home: ${javacc.home}"/>
<property name="hql.src.dir"
value="${src.dir}/org/apache/hadoop/hbase/hql" />
<mkdir dir="${hql.src.dir}/generated" />
<javacc
@@ -75,7 +117,6 @@
</target>
<target name="compile" depends="init,javacc">
<echo message="contrib: ${name}"/>
<!--Compile whats under src and generated java classes made from jsp-->
<javac
encoding="${build.encoding}"
@@ -91,9 +132,9 @@
<!-- Override jar target to specify main class -->
<target name="jar" depends="compile">
<jar jarfile="${build.dir}/hadoop-${version}-${name}.jar"
<jar jarfile="${build.dir}/hbase-${version}.jar"
basedir="${build.classes}" >
<fileset file="${root}/conf/hbase-default.xml"/>
<fileset file="${basedir}/conf/hbase-default.xml"/>
<zipfileset dir="${build.webapps}" prefix="webapps"/>
</jar>
</target>
@@ -101,6 +142,7 @@
<!--Manage our own packaging... install our dependencies,
bin, etc.-->
<target name="package" depends="jar" unless="skip.contrib">
<!--TODO!!!-->
<condition property="dist.dir" value="distribution">
<not>
<isset property="dist.dir" />
@@ -110,7 +152,7 @@
<mkdir dir="${hbase.dist.dir}"/>
<copy todir="${hbase.dist.dir}" includeEmptyDirs="false" flatten="true">
<fileset dir="${build.dir}">
<include name="hadoop-${version}-${name}.jar" />
<include name="hbase-${version}.jar" />
</fileset>
</copy>
<mkdir dir="${hbase.dist.dir}/webapps"/>
@@ -138,8 +180,7 @@
<!-- Override compile-test target so can generate a hbase
test jar that has test and hbase classes.
-->
<target name="compile-test" depends="compile" if="test.available">
<echo message="contrib: ${name}"/>
<target name="compile-test" depends="compile" >
<javac
encoding="${build.encoding}"
srcdir="${src.test}"
@@ -165,12 +206,54 @@
<path id="test.classpath">
<pathelement location="${build.test}" />
<pathelement location="${src.test}"/>
<pathelement location="${hadoop.root}/build/test/classes"/>
<pathelement location="${hadoop.root}/src/contrib/test"/>
<pathelement location="${conf.dir}"/>
<pathelement location="${hadoop.root}/build"/>
<pathelement location="${root}/conf"/>
<pathelement location="${build.dir}"/>
<path refid="classpath"/>
</path>
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
<target name="test" depends="compile-test, compile" >
<delete dir="${test.log.dir}"/>
<mkdir dir="${test.log.dir}"/>
<junit
printsummary="yes" showoutput="${test.output}"
haltonfailure="no" fork="yes" maxmemory="256m"
errorProperty="tests.failed" failureProperty="tests.failed"
timeout="${test.timeout}">
<sysproperty key="test.build.data" value="${build.test}/data"/>
<sysproperty key="build.test" value="${build.test}"/>
<sysproperty key="contrib.name" value="${name}"/>
<!-- requires fork=yes for:
relative File paths to use the specified user.dir
classpath to use build/contrib/*.jar
-->
<sysproperty key="user.dir" value="${build.test}/data"/>
<sysproperty key="fs.default.name" value="${fs.default.name}"/>
<sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
<sysproperty key="test.log.dir" value="${hadoop.log.dir}"/>
<classpath refid="test.classpath"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${build.test}" unless="testcase">
<fileset dir="${src.test}"
includes="**/Test*.java" excludes="**/${test.exclude}.java" />
</batchtest>
<batchtest todir="${build.test}" if="testcase">
<fileset dir="${src.test}" includes="**/${testcase}.java"/>
</batchtest>
</junit>
<fail if="tests.failed">Tests failed!</fail>
</target>
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
<target name="clean">
<delete dir="${build.dir}"/>
</target>
</project>
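As a usage sketch of the test target above: a single case can be selected via the testcase property (TestToString is one of the tests updated in this commit):

  ant -Dtestcase=TestToString test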

hbase-default.xml

@@ -221,12 +221,6 @@
such as hlog.
</description>
</property>
<property>
<name>hbase.hstore.blockCache.blockSize</name>
<value>65536</value>
<description>The size of each block in any block caches.
</description>
</property>
<!-- HbaseShell Configurations -->
<property>

[14 binary files changed (content not shown). New files: lib/hadoop-0.16.0-core.jar, lib/hadoop-0.16.0-test.jar, lib/jetty-5.1.4.jar, lib/jetty-ext/jsp-api.jar, lib/junit-3.8.1.jar, lib/log4j-1.2.13.jar, lib/servlet-api.jar, lib/xmlenc-0.52.jar; six other binary files changed (names not shown).]

HColumnDescriptor.java

@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.TextSequence;
public class HColumnDescriptor implements WritableComparable {
// For future backward compatibility
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)2;
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)1;
/** Legal family names can only contain 'word characters' and end in a colon. */
public static final Pattern LEGAL_FAMILY_NAME = Pattern.compile("\\w+:");
@@ -76,11 +76,6 @@ public class HColumnDescriptor implements WritableComparable {
*/
public static final boolean DEFAULT_IN_MEMORY = false;
/**
* Default setting for whether to use a block cache or not.
*/
public static final boolean DEFAULT_BLOCK_CACHE_ENABLED = false;
/**
* Default maximum length of cell contents.
*/
@@ -100,8 +95,6 @@ public class HColumnDescriptor implements WritableComparable {
private CompressionType compressionType;
// Serve reads from in-memory cache
private boolean inMemory;
// Serve reads from in-memory block cache
private boolean blockCacheEnabled;
// Maximum value size
private int maxValueLength;
// True if bloom filter was specified
@@ -130,7 +123,6 @@
this(columnName == null || columnName.length() <= 0?
new Text(): new Text(columnName),
DEFAULT_N_VERSIONS, DEFAULT_COMPRESSION_TYPE, DEFAULT_IN_MEMORY,
DEFAULT_BLOCK_CACHE_ENABLED,
Integer.MAX_VALUE, DEFAULT_BLOOM_FILTER_DESCRIPTOR);
}
@@ -142,7 +134,6 @@
* @param compression Compression type
* @param inMemory If true, column data should be kept in an HRegionServer's
* cache
* @param blockCacheEnabled If true, MapFile blocks should be cached
* @param maxValueLength Restrict values to &lt;= this value
* @param bloomFilter Enable the specified bloom filter for this column
*
@@ -153,7 +144,6 @@
*/
public HColumnDescriptor(final Text name, final int maxVersions,
final CompressionType compression, final boolean inMemory,
final boolean blockCacheEnabled,
final int maxValueLength, final BloomFilterDescriptor bloomFilter) {
String familyStr = name.toString();
// Test name if not null (It can be null when deserializing after
@@ -175,7 +165,6 @@
}
this.maxVersions = maxVersions;
this.inMemory = inMemory;
this.blockCacheEnabled = blockCacheEnabled;
this.maxValueLength = maxValueLength;
this.bloomFilter = bloomFilter;
this.bloomFilterSpecified = this.bloomFilter == null ? false : true;
@@ -223,13 +212,6 @@
return this.inMemory;
}
/**
* @return True if MapFile blocks should be cached.
*/
public boolean isBlockCacheEnabled() {
return blockCacheEnabled;
}
/**
* @return Maximum value length.
*/
@@ -252,7 +234,6 @@
return "{name: " + tmp.substring(0, tmp.length() - 1) +
", max versions: " + maxVersions +
", compression: " + this.compressionType + ", in memory: " + inMemory +
", block cache enabled: " + blockCacheEnabled +
", max length: " + maxValueLength + ", bloom filter: " +
(bloomFilterSpecified ? bloomFilter.toString() : "none") + "}";
}
@@ -270,7 +251,6 @@
result ^= Integer.valueOf(this.maxVersions).hashCode();
result ^= this.compressionType.hashCode();
result ^= Boolean.valueOf(this.inMemory).hashCode();
result ^= Boolean.valueOf(this.blockCacheEnabled).hashCode();
result ^= Integer.valueOf(this.maxValueLength).hashCode();
result ^= Boolean.valueOf(this.bloomFilterSpecified).hashCode();
result ^= Byte.valueOf(this.versionNumber).hashCode();
@@ -297,10 +277,6 @@
bloomFilter = new BloomFilterDescriptor();
bloomFilter.readFields(in);
}
if (this.versionNumber > 1) {
this.blockCacheEnabled = in.readBoolean();
}
}
/** {@inheritDoc} */
@@ -316,8 +292,6 @@
if(bloomFilterSpecified) {
bloomFilter.write(out);
}
out.writeBoolean(this.blockCacheEnabled);
}
// Comparable
@@ -353,18 +327,6 @@
}
}
if(result == 0) {
if(this.blockCacheEnabled == other.blockCacheEnabled) {
result = 0;
} else if(this.blockCacheEnabled) {
result = -1;
} else {
result = 1;
}
}
if(result == 0) {
result = other.maxValueLength - this.maxValueLength;
}

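With blockCacheEnabled removed, the HColumnDescriptor constructor is down to six arguments. A minimal Java fragment showing the revised call (the "info:" family name mirrors the rootTableDesc usage in HTableDescriptor.java further down; all values here are illustrative only):

  HColumnDescriptor family = new HColumnDescriptor(
      new Text("info:"),                       // family name; must match the \w+: pattern
      1,                                       // max versions
      HColumnDescriptor.CompressionType.NONE,  // no compression
      false,                                   // not served from in-memory cache
      Integer.MAX_VALUE,                       // max value length
      null);                                   // no bloom filter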
HStore.java

@@ -741,19 +741,9 @@ public class HStore implements HConstants {
// Finally, start up all the map readers! (There could be more than one
// since we haven't compacted yet.)
boolean first = true;
for(Map.Entry<Long, HStoreFile> e: this.storefiles.entrySet()) {
if (first) {
// Use a block cache (if configured) for the first reader only
// so as to control memory usage.
this.readers.put(e.getKey(),
e.getValue().getReader(this.fs, this.bloomFilter,
family.isBlockCacheEnabled()));
first = false;
} else {
this.readers.put(e.getKey(),
e.getValue().getReader(this.fs, this.bloomFilter));
}
this.readers.put(e.getKey(),
e.getValue().getReader(this.fs, this.bloomFilter));
}
}
@@ -1570,10 +1560,7 @@
// 6. Loading the new TreeMap.
Long orderVal = Long.valueOf(finalCompactedFile.loadInfo(fs));
this.readers.put(orderVal,
// Use a block cache (if configured) for this reader since
// it is the only one.
finalCompactedFile.getReader(this.fs, this.bloomFilter,
family.isBlockCacheEnabled()));
finalCompactedFile.getReader(this.fs, this.bloomFilter));
this.storefiles.put(orderVal, finalCompactedFile);
} catch (IOException e) {
e = RemoteExceptionHandler.checkIOException(e);

HStoreFile.java

@@ -31,11 +31,9 @@ import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.BlockFSInputStream;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.MapFile;
@@ -415,37 +413,17 @@ public class HStoreFile implements HConstants {
* @return MapFile.Reader
* @throws IOException
*/
public MapFile.Reader getReader(final FileSystem fs,
final Filter bloomFilter)
throws IOException {
return isReference()?
new HStoreFile.HalfMapFileReader(fs, getMapFilePath(reference).toString(),
conf, reference.getFileRegion(), reference.getMidkey(), bloomFilter):
new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
conf, bloomFilter);
}
/**
* Get reader for the store file map file.
* Client is responsible for closing file when done.
* @param fs
* @param bloomFilter If null, no filtering is done.
* @param blockCacheEnabled If true, MapFile blocks should be cached.
* @return MapFile.Reader
* @throws IOException
*/
public synchronized MapFile.Reader getReader(final FileSystem fs,
final Filter bloomFilter, final boolean blockCacheEnabled)
final Filter bloomFilter)
throws IOException {
if (isReference()) {
return new HStoreFile.HalfMapFileReader(fs,
getMapFilePath(reference).toString(), conf,
reference.getFileRegion(), reference.getMidkey(), bloomFilter,
blockCacheEnabled);
reference.getFileRegion(), reference.getMidkey(), bloomFilter);
}
return new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
conf, bloomFilter, blockCacheEnabled);
conf, bloomFilter);
}
/**
@@ -606,13 +584,8 @@
*/
static class HbaseMapFile extends MapFile {
/**
* A reader capable of reading and caching blocks of the data file.
*/
static class HbaseReader extends MapFile.Reader {
private final boolean blockCacheEnabled;
/**
* @param fs
* @param dirName
@@ -621,23 +594,7 @@
*/
public HbaseReader(FileSystem fs, String dirName, Configuration conf)
throws IOException {
this(fs, dirName, conf, false);
}
/**
* @param fs
* @param dirName
* @param conf
* @param blockCacheEnabled
* @throws IOException
*/
public HbaseReader(FileSystem fs, String dirName, Configuration conf,
boolean blockCacheEnabled)
throws IOException {
super(fs, dirName, null, conf, false); // defer opening streams
this.blockCacheEnabled = blockCacheEnabled;
open(fs, dirName, null, conf);
super(fs, dirName, conf);
// Force reading of the mapfile index by calling midKey.
// Reading the index will bring the index into memory over
// here on the client and then close the index file freeing
@@ -648,28 +605,6 @@
// using up datanode resources. See HADOOP-2341.
midKey();
}
@Override
protected org.apache.hadoop.io.SequenceFile.Reader createDataFileReader(
FileSystem fs, Path dataFile, Configuration conf)
throws IOException {
if (!blockCacheEnabled) {
return super.createDataFileReader(fs, dataFile, conf);
}
LOG.info("Block Cache enabled");
final int blockSize = conf.getInt("hbase.hstore.blockCache.blockSize",
64 * 1024);
return new SequenceFile.Reader(fs, dataFile, conf) {
@Override
protected FSDataInputStream openFile(FileSystem fs, Path file,
int bufferSize, long length) throws IOException {
return new FSDataInputStream(new BlockFSInputStream(
super.openFile(fs, file, bufferSize, length), length,
blockSize));
}
};
}
}
static class HbaseWriter extends MapFile.Writer {
@@ -718,13 +653,6 @@
bloomFilter = filter;
}
public Reader(FileSystem fs, String dirName, Configuration conf,
final Filter filter, final boolean blockCacheEnabled)
throws IOException {
super(fs, dirName, conf, blockCacheEnabled);
bloomFilter = filter;
}
/** {@inheritDoc} */
@Override
public Writable get(WritableComparable key, Writable val)
@@ -817,7 +745,7 @@
final Configuration conf, final Range r,
final WritableComparable midKey)
throws IOException {
this(fs, dirName, conf, r, midKey, null, false);
this(fs, dirName, conf, r, midKey, null);
}
HalfMapFileReader(final FileSystem fs, final String dirName,
@@ -829,16 +757,6 @@
midkey = midKey;
}
HalfMapFileReader(final FileSystem fs, final String dirName,
final Configuration conf, final Range r,
final WritableComparable midKey, final Filter filter,
final boolean blockCacheEnabled)
throws IOException {
super(fs, dirName, conf, filter, blockCacheEnabled);
top = isTopFileRegion(r);
midkey = midKey;
}
@SuppressWarnings("unchecked")
private void checkKey(final WritableComparable key)
throws IOException {

HTableDescriptor.java

@@ -43,15 +43,15 @@ public class HTableDescriptor implements WritableComparable {
public static final HTableDescriptor rootTableDesc =
new HTableDescriptor(HConstants.ROOT_TABLE_NAME,
new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
HColumnDescriptor.CompressionType.NONE, false, false,
Integer.MAX_VALUE, null));
HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
null));
/** table descriptor for meta table */
public static final HTableDescriptor metaTableDesc =
new HTableDescriptor(HConstants.META_TABLE_NAME,
new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
HColumnDescriptor.CompressionType.NONE, false, false,
Integer.MAX_VALUE, null));
HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
null));
private boolean rootregion;
private boolean metaregion;
@@ -256,4 +256,4 @@
public static Path getTableDir(Path rootdir, Text tableName) {
return new Path(rootdir, tableName.toString());
}
}
}

AlterCommand.java

@@ -202,8 +202,6 @@ public class AlterCommand extends SchemaModificationCommand {
.get(spec)).toUpperCase());
} else if (spec.equals("IN_MEMORY")) {
inMemory = (Boolean) columnSpec.get(spec);
} else if (spec.equals("BLOCK_CACHE_ENABLED")) {
blockCacheEnabled = (Boolean) columnSpec.get(spec);
} else if (spec.equals("BLOOMFILTER")) {
bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
.toUpperCase());
@@ -231,8 +229,7 @@
column = appendDelimiter(column);
HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
maxVersions, compression, inMemory, blockCacheEnabled,
maxLength, bloomFilterDesc);
maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
return columnDesc;
}
@@ -246,7 +243,6 @@
maxLength = original.getMaxValueLength();
compression = original.getCompression();
inMemory = original.isInMemory();
blockCacheEnabled = original.isBlockCacheEnabled();
bloomFilterDesc = original.getBloomFilter();
}
}

SchemaModificationCommand.java

@@ -37,7 +37,6 @@ public abstract class SchemaModificationCommand extends BasicCommand {
protected int maxLength;
protected HColumnDescriptor.CompressionType compression;
protected boolean inMemory;
protected boolean blockCacheEnabled;
protected BloomFilterDescriptor bloomFilterDesc;
protected BloomFilterType bloomFilterType;
protected int vectorSize;
@@ -53,7 +52,6 @@
maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;
inMemory = HColumnDescriptor.DEFAULT_IN_MEMORY;
blockCacheEnabled = HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED;
bloomFilterDesc = HColumnDescriptor.DEFAULT_BLOOM_FILTER_DESCRIPTOR;
}
@@ -78,8 +76,6 @@
.valueOf(((String) columnSpec.get(spec)).toUpperCase());
} else if (spec.equals("IN_MEMORY")) {
inMemory = (Boolean) columnSpec.get(spec);
} else if (spec.equals("BLOCK_CACHE_ENABLED")) {
blockCacheEnabled = (Boolean) columnSpec.get(spec);
} else if (spec.equals("BLOOMFILTER")) {
bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
.toUpperCase());
@@ -107,8 +103,7 @@
column = appendDelimiter(column);
HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
maxVersions, compression, inMemory, blockCacheEnabled,
maxLength, bloomFilterDesc);
maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
return columnDesc;
}

HBaseTestCase.java

@@ -184,11 +184,11 @@ public abstract class HBaseTestCase extends TestCase {
final int versions) {
HTableDescriptor htd = new HTableDescriptor(name);
htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME1), versions,
CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
CompressionType.NONE, false, Integer.MAX_VALUE, null));
htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME2), versions,
CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
CompressionType.NONE, false, Integer.MAX_VALUE, null));
htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME3), versions,
CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
CompressionType.NONE, false, Integer.MAX_VALUE, null));
return htd;
}
@@ -565,4 +565,4 @@
return this.table.get(row, column, ts, versions);
}
}
}
}

TestBloomFilters.java

@@ -169,7 +169,6 @@ public class TestBloomFilters extends HBaseClusterTestCase {
1, // Max versions
HColumnDescriptor.CompressionType.NONE, // no compression
HColumnDescriptor.DEFAULT_IN_MEMORY, // not in memory
HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
bloomFilter
)
@@ -235,7 +234,6 @@
1, // Max versions
HColumnDescriptor.CompressionType.NONE, // no compression
HColumnDescriptor.DEFAULT_IN_MEMORY, // not in memory
HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
bloomFilter
)

TestTimestamp.java

@@ -337,7 +337,7 @@ public class TestTimestamp extends HBaseTestCase {
private HRegion createRegion() throws IOException {
HTableDescriptor htd = createTableDescriptor(getName());
htd.addFamily(new HColumnDescriptor(COLUMN, VERSIONS,
CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
CompressionType.NONE, false, Integer.MAX_VALUE, null));
return createNewHRegion(htd, null, null);
}
}
}

TestToString.java

@@ -44,9 +44,8 @@ public class TestToString extends TestCase {
HTableDescriptor htd = HTableDescriptor.rootTableDesc;
System.out.println(htd.toString());
assertEquals("Table descriptor", "name: -ROOT-, families: {info:={name: " +
"info, max versions: 1, compression: NONE, in memory: false, " +
"block cache enabled: false, max length: 2147483647, " +
"bloom filter: none}}", htd.toString());
"info, max versions: 1, compression: NONE, in memory: false, max " +
"length: 2147483647, bloom filter: none}}", htd.toString());
}
/**