HBASE-403 Fix build after move of hbase in svn

Part 1: The jar and test targets work, as do javacc and clean. TODO: the package target needs clean-up.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@618453 13f79535-47bb-0310-9956-ffa450edef68

parent 7946db69f7
commit c6b29235e7
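Assuming a stock Ant installation, the targets named above would be invoked from the root of the hbase checkout as ant jar, ant test, and ant clean; ant javacc additionally needs a JavaCC location supplied with -Djavacc.home=... because, as the build file below shows, that target is guarded by if="javacc.home".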
@@ -41,20 +41,16 @@ the generated java classes and the web.xml. To run, do following:
 -->
 <project name="build.hbase.jsp" default="jspc">
   <property name="lib.dir" value="${basedir}/lib" />
-  <property name="hadoop.root" location="${basedir}/../../../"/>
   <property name="src.webapps" value="${basedir}/src/webapps" />
   <property name="generated.webapps.src"
     value="${basedir}/src/java"/>
 
   <target name="jspc" >
     <path id="jspc.classpath">
-      <fileset dir="${lib.dir}">
-        <include name="commons-el*jar" />
-      </fileset>
-      <fileset dir="${hadoop.root}/lib/jetty-ext/">
+      <fileset dir="${basedir}/lib/jetty-ext/">
         <include name="*jar" />
       </fileset>
-      <fileset dir="${hadoop.root}/lib/">
+      <fileset dir="${basedir}/lib/">
         <include name="servlet-api*jar" />
         <include name="commons-logging*jar" />
       </fileset>
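The net effect of this hunk: the jspc classpath no longer reaches outside the project through ${hadoop.root}; the jetty-ext, servlet-api, and commons-logging jars are all resolved from hbase's own lib/ directory instead.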
build.xml (123 lines changed)
@@ -18,18 +18,60 @@
 -->
 
 <project name="hbase" default="jar">
-  <import file="../build-contrib.xml"/>
+  <!-- Load all the default properties, and any the user wants -->
+  <!-- to contribute (without having to type -D or edit this file -->
+  <property file="${user.home}/${name}.build.properties" />
+  <property file="${basedir}/build.properties" />
 
-  <property name="build.webapps" value="${build.dir}/webapps"/>
-  <property name="build.lib" value="${build.dir}/lib"/>
-  <property name="build.conf" value="${build.dir}/conf"/>
-  <property name="build.bin" value="${build.dir}/bin"/>
-  <property name="src.webapps" value="${basedir}/src/webapps" />
+  <property name="src.dir" location="${basedir}/src/java"/>
+  <property name="src.test" location="${basedir}/src/test"/>
+  <property name="src.examples" location="${basedir}/src/examples"/>
+  <property name="src.webapps" location="${basedir}/src/webapps"/>
+  <property name="test.output" value="no"/>
+  <property name="test.timeout" value="900000"/>
+
+  <property name="build.dir" location="${basedir}/build"/>
+  <property name="build.bin" location="${build.dir}/bin"/>
+  <property name="build.conf" location="${build.dir}/conf"/>
+  <property name="build.webapps" location="${build.dir}/webpps"/>
+  <property name="build.lib" location="${build.dir}/lib"/>
+  <property name="build.classes" location="${build.dir}/classes"/>
+  <property name="build.test" location="${build.dir}/test"/>
+  <property name="build.examples" location="${build.dir}/examples"/>
+
+  <property name="test.build.dir" value="${build.dir}/test"/>
+  <property name="test.log.dir" value="${test.build.dir}/logs"/>
+  <property name="test.junit.output.format" value="plain"/>
+
+  <!-- all jars together -->
+  <property name="javac.deprecation" value="off"/>
+  <property name="javac.debug" value="on"/>
+
+  <property name="javadoc.link"
+    value="http://java.sun.com/j2se/1.5.0/docs/api/"/>
+
+  <property name="build.encoding" value="ISO-8859-1"/>
+
+  <!-- the normal classpath -->
+  <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar"/>
+
+  <path id="classpath">
+    <pathelement location="${build.classes}"/>
+    <fileset refid="lib.jars"/>
+    <fileset dir="${basedir}/lib/jetty-ext/">
+      <include name="*jar" />
+    </fileset>
+  </path>
 
   <target name="init">
-    <antcall target="hadoopbuildcontrib.init"/>
+    <mkdir dir="${build.dir}"/>
+    <mkdir dir="${build.classes}"/>
+    <mkdir dir="${build.test}"/>
+    <mkdir dir="${build.examples}"/>
 
     <!--Version is set only if called from hadoop build.xml. Set a default-->
-    <condition property="version" value="0.15.0-dev">
+    <condition property="version" value="0.1.0-dev">
       <not>
         <isset property="version" />
       </not>
@@ -63,8 +105,8 @@
 (no textual difference survives in this hunk as captured; presumably a whitespace-only change)
 </target>
 
 <target name="javacc" if="javacc.home">
   <echo message="javacc.home: ${javacc.home}"/>
   <property name="hql.src.dir"
     value="${src.dir}/org/apache/hadoop/hbase/hql" />
   <mkdir dir="${hql.src.dir}/generated" />
   <javacc
@@ -75,7 +117,6 @@
 </target>
 
 <target name="compile" depends="init,javacc">
-  <echo message="contrib: ${name}"/>
   <!--Compile whats under src and generated java classes made from jsp-->
   <javac
     encoding="${build.encoding}"
@@ -91,9 +132,9 @@
 
 <!-- Override jar target to specify main class -->
 <target name="jar" depends="compile">
-  <jar jarfile="${build.dir}/hadoop-${version}-${name}.jar"
+  <jar jarfile="${build.dir}/hbase-${version}.jar"
     basedir="${build.classes}" >
-    <fileset file="${root}/conf/hbase-default.xml"/>
+    <fileset file="${basedir}/conf/hbase-default.xml"/>
     <zipfileset dir="${build.webapps}" prefix="webapps"/>
   </jar>
 </target>
@@ -101,6 +142,7 @@
 <!--Manage our own packaging... install our dependencies,
   bin, etc.-->
 <target name="package" depends="jar" unless="skip.contrib">
+  <!--TODO!!!-->
   <condition property="dist.dir" value="distribution">
     <not>
       <isset property="dist.dir" />
@@ -110,7 +152,7 @@
   <mkdir dir="${hbase.dist.dir}"/>
   <copy todir="${hbase.dist.dir}" includeEmptyDirs="false" flatten="true">
     <fileset dir="${build.dir}">
-      <include name="hadoop-${version}-${name}.jar" />
+      <include name="hbase-${version}.jar" />
     </fileset>
   </copy>
   <mkdir dir="${hbase.dist.dir}/webapps"/>
@@ -138,8 +180,7 @@
 <!-- Override compile-test target so can generate a hbase
   test jar that has test and hbase classes.
 -->
-<target name="compile-test" depends="compile" if="test.available">
-  <echo message="contrib: ${name}"/>
+<target name="compile-test" depends="compile" >
   <javac
     encoding="${build.encoding}"
     srcdir="${src.test}"
@@ -165,12 +206,54 @@
 <path id="test.classpath">
   <pathelement location="${build.test}" />
   <pathelement location="${src.test}"/>
-  <pathelement location="${hadoop.root}/build/test/classes"/>
-  <pathelement location="${hadoop.root}/src/contrib/test"/>
   <pathelement location="${conf.dir}"/>
-  <pathelement location="${hadoop.root}/build"/>
-  <pathelement location="${root}/conf"/>
   <pathelement location="${build.dir}"/>
   <path refid="classpath"/>
 </path>
 
+<!-- ================================================================== -->
+<!-- Run unit tests -->
+<!-- ================================================================== -->
+<target name="test" depends="compile-test, compile" >
+  <delete dir="${test.log.dir}"/>
+  <mkdir dir="${test.log.dir}"/>
+  <junit
+    printsummary="yes" showoutput="${test.output}"
+    haltonfailure="no" fork="yes" maxmemory="256m"
+    errorProperty="tests.failed" failureProperty="tests.failed"
+    timeout="${test.timeout}">
+
+    <sysproperty key="test.build.data" value="${build.test}/data"/>
+    <sysproperty key="build.test" value="${build.test}"/>
+    <sysproperty key="contrib.name" value="${name}"/>
+
+    <!-- requires fork=yes for:
+      relative File paths to use the specified user.dir
+      classpath to use build/contrib/*.jar
+    -->
+    <sysproperty key="user.dir" value="${build.test}/data"/>
+
+    <sysproperty key="fs.default.name" value="${fs.default.name}"/>
+    <sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
+    <sysproperty key="test.log.dir" value="${hadoop.log.dir}"/>
+    <classpath refid="test.classpath"/>
+    <formatter type="${test.junit.output.format}" />
+    <batchtest todir="${build.test}" unless="testcase">
+      <fileset dir="${src.test}"
+        includes="**/Test*.java" excludes="**/${test.exclude}.java" />
+    </batchtest>
+    <batchtest todir="${build.test}" if="testcase">
+      <fileset dir="${src.test}" includes="**/${testcase}.java"/>
+    </batchtest>
+  </junit>
+  <fail if="tests.failed">Tests failed!</fail>
+
+</target>
+
+<!-- ================================================================== -->
+<!-- Clean. Delete the build files, and their directories -->
+<!-- ================================================================== -->
+<target name="clean">
+  <delete dir="${build.dir}"/>
+</target>
 </project>
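As the pair of batchtest elements above implies, ant test runs everything under src/test matching **/Test*.java, while defining the testcase property narrows the run to one class, for example ant test -Dtestcase=TestToString (TestToString is one of the tests touched later in this commit). The trailing fail element converts any recorded test failure into a build failure.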
hbase-default.xml
@@ -221,12 +221,6 @@
     such as hlog.
     </description>
   </property>
-  <property>
-    <name>hbase.hstore.blockCache.blockSize</name>
-    <value>65536</value>
-    <description>The size of each block in any block caches.
-    </description>
-  </property>
 
   <!-- HbaseShell Configurations -->
   <property>
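Note the pairing with the Java hunks below: the removed hbase.hstore.blockCache.blockSize property was read in exactly one place, the createDataFileReader override that this commit deletes from HStoreFile's HbaseReader, so the setting goes away together with its only consumer.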
(Binary files changed; contents not shown in this view.)
HColumnDescriptor.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.TextSequence;
 public class HColumnDescriptor implements WritableComparable {
 
   // For future backward compatibility
-  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)2;
+  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)1;
 
   /** Legal family names can only contain 'word characters' and end in a colon. */
   public static final Pattern LEGAL_FAMILY_NAME = Pattern.compile("\\w+:");
@@ -76,11 +76,6 @@ public class HColumnDescriptor implements WritableComparable {
    */
   public static final boolean DEFAULT_IN_MEMORY = false;
 
-  /**
-   * Default setting for whether to use a block cache or not.
-   */
-  public static final boolean DEFAULT_BLOCK_CACHE_ENABLED = false;
-
   /**
    * Default maximum length of cell contents.
    */
@@ -100,8 +95,6 @@ public class HColumnDescriptor implements WritableComparable {
   private CompressionType compressionType;
   // Serve reads from in-memory cache
   private boolean inMemory;
-  // Serve reads from in-memory block cache
-  private boolean blockCacheEnabled;
   // Maximum value size
   private int maxValueLength;
   // True if bloom filter was specified
@@ -130,7 +123,6 @@ public class HColumnDescriptor implements WritableComparable {
     this(columnName == null || columnName.length() <= 0?
       new Text(): new Text(columnName),
       DEFAULT_N_VERSIONS, DEFAULT_COMPRESSION_TYPE, DEFAULT_IN_MEMORY,
-      DEFAULT_BLOCK_CACHE_ENABLED,
       Integer.MAX_VALUE, DEFAULT_BLOOM_FILTER_DESCRIPTOR);
   }
 
@@ -142,7 +134,6 @@ public class HColumnDescriptor implements WritableComparable {
    * @param compression Compression type
    * @param inMemory If true, column data should be kept in an HRegionServer's
    * cache
-   * @param blockCacheEnabled If true, MapFile blocks should be cached
    * @param maxValueLength Restrict values to <= this value
    * @param bloomFilter Enable the specified bloom filter for this column
    *
@@ -153,7 +144,6 @@ public class HColumnDescriptor implements WritableComparable {
    */
   public HColumnDescriptor(final Text name, final int maxVersions,
       final CompressionType compression, final boolean inMemory,
-      final boolean blockCacheEnabled,
       final int maxValueLength, final BloomFilterDescriptor bloomFilter) {
     String familyStr = name.toString();
     // Test name if not null (It can be null when deserializing after
@@ -175,7 +165,6 @@ public class HColumnDescriptor implements WritableComparable {
     }
     this.maxVersions = maxVersions;
     this.inMemory = inMemory;
-    this.blockCacheEnabled = blockCacheEnabled;
     this.maxValueLength = maxValueLength;
     this.bloomFilter = bloomFilter;
     this.bloomFilterSpecified = this.bloomFilter == null ? false : true;
@@ -223,13 +212,6 @@ public class HColumnDescriptor implements WritableComparable {
     return this.inMemory;
   }
 
-  /**
-   * @return True if MapFile blocks should be cached.
-   */
-  public boolean isBlockCacheEnabled() {
-    return blockCacheEnabled;
-  }
-
   /**
    * @return Maximum value length.
    */
@@ -252,7 +234,6 @@ public class HColumnDescriptor implements WritableComparable {
     return "{name: " + tmp.substring(0, tmp.length() - 1) +
       ", max versions: " + maxVersions +
       ", compression: " + this.compressionType + ", in memory: " + inMemory +
-      ", block cache enabled: " + blockCacheEnabled +
       ", max length: " + maxValueLength + ", bloom filter: " +
       (bloomFilterSpecified ? bloomFilter.toString() : "none") + "}";
   }
@@ -270,7 +251,6 @@ public class HColumnDescriptor implements WritableComparable {
     result ^= Integer.valueOf(this.maxVersions).hashCode();
     result ^= this.compressionType.hashCode();
     result ^= Boolean.valueOf(this.inMemory).hashCode();
-    result ^= Boolean.valueOf(this.blockCacheEnabled).hashCode();
     result ^= Integer.valueOf(this.maxValueLength).hashCode();
     result ^= Boolean.valueOf(this.bloomFilterSpecified).hashCode();
     result ^= Byte.valueOf(this.versionNumber).hashCode();
@@ -297,10 +277,6 @@ public class HColumnDescriptor implements WritableComparable {
       bloomFilter = new BloomFilterDescriptor();
       bloomFilter.readFields(in);
     }
-
-    if (this.versionNumber > 1) {
-      this.blockCacheEnabled = in.readBoolean();
-    }
   }
 
   /** {@inheritDoc} */
@@ -316,8 +292,6 @@ public class HColumnDescriptor implements WritableComparable {
     if(bloomFilterSpecified) {
       bloomFilter.write(out);
     }
-
-    out.writeBoolean(this.blockCacheEnabled);
   }
 
   // Comparable
@@ -353,18 +327,6 @@ public class HColumnDescriptor implements WritableComparable {
       }
     }
-
-    if(result == 0) {
-      if(this.blockCacheEnabled == other.blockCacheEnabled) {
-        result = 0;
-
-      } else if(this.blockCacheEnabled) {
-        result = -1;
-
-      } else {
-        result = 1;
-      }
-    }
 
     if(result == 0) {
       result = other.maxValueLength - this.maxValueLength;
     }
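For callers, the user-visible change in HColumnDescriptor is the constructor: the boolean blockCacheEnabled parameter that sat between inMemory and maxValueLength is gone, so descriptors are built with six arguments again. A minimal sketch of a post-patch call site follows; the class and method names are hypothetical, and the argument values mirror the rootTableDesc hunk further down:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.io.Text;

    // Hypothetical caller, not part of the patch.
    public class DescriptorExample {
      static HColumnDescriptor infoFamily() {
        return new HColumnDescriptor(
            new Text("info:"),                       // family name; must match \w+:
            1,                                       // max versions to keep
            HColumnDescriptor.CompressionType.NONE,  // no compression
            false,                                   // not served from in-memory cache
            Integer.MAX_VALUE,                       // max value length
            null);                                   // no bloom filter
      }
    }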
HStore.java
@@ -741,19 +741,9 @@ public class HStore implements HConstants {
 
     // Finally, start up all the map readers! (There could be more than one
     // since we haven't compacted yet.)
-    boolean first = true;
     for(Map.Entry<Long, HStoreFile> e: this.storefiles.entrySet()) {
-      if (first) {
-        // Use a block cache (if configured) for the first reader only
-        // so as to control memory usage.
-        this.readers.put(e.getKey(),
-          e.getValue().getReader(this.fs, this.bloomFilter,
-            family.isBlockCacheEnabled()));
-        first = false;
-      } else {
-        this.readers.put(e.getKey(),
-          e.getValue().getReader(this.fs, this.bloomFilter));
-      }
+      this.readers.put(e.getKey(),
+        e.getValue().getReader(this.fs, this.bloomFilter));
     }
   }
 
@@ -1570,10 +1560,7 @@
       // 6. Loading the new TreeMap.
       Long orderVal = Long.valueOf(finalCompactedFile.loadInfo(fs));
       this.readers.put(orderVal,
-        // Use a block cache (if configured) for this reader since
-        // it is the only one.
-        finalCompactedFile.getReader(this.fs, this.bloomFilter,
-          family.isBlockCacheEnabled()));
+        finalCompactedFile.getReader(this.fs, this.bloomFilter));
       this.storefiles.put(orderVal, finalCompactedFile);
     } catch (IOException e) {
       e = RemoteExceptionHandler.checkIOException(e);
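With the overloads removed, both HStore call sites collapse onto the surviving two-argument getReader(FileSystem, Filter) shown in the HStoreFile hunks that follow; the special case that gave only the first reader a block cache disappears along with it.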
HStoreFile.java
@@ -31,11 +31,9 @@ import java.util.Random;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.io.BlockFSInputStream;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.MapFile;
@@ -415,37 +413,17 @@
    * @return MapFile.Reader
    * @throws IOException
    */
-  public MapFile.Reader getReader(final FileSystem fs,
-      final Filter bloomFilter)
-  throws IOException {
-    return isReference()?
-      new HStoreFile.HalfMapFileReader(fs, getMapFilePath(reference).toString(),
-        conf, reference.getFileRegion(), reference.getMidkey(), bloomFilter):
-      new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
-        conf, bloomFilter);
-  }
-
-  /**
-   * Get reader for the store file map file.
-   * Client is responsible for closing file when done.
-   * @param fs
-   * @param bloomFilter If null, no filtering is done.
-   * @param blockCacheEnabled If true, MapFile blocks should be cached.
-   * @return MapFile.Reader
-   * @throws IOException
-   */
   public synchronized MapFile.Reader getReader(final FileSystem fs,
-      final Filter bloomFilter, final boolean blockCacheEnabled)
+      final Filter bloomFilter)
   throws IOException {
 
     if (isReference()) {
       return new HStoreFile.HalfMapFileReader(fs,
         getMapFilePath(reference).toString(), conf,
-        reference.getFileRegion(), reference.getMidkey(), bloomFilter,
-        blockCacheEnabled);
+        reference.getFileRegion(), reference.getMidkey(), bloomFilter);
     }
     return new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
-      conf, bloomFilter, blockCacheEnabled);
+      conf, bloomFilter);
   }
 
   /**
@@ -606,13 +584,8 @@
    */
   static class HbaseMapFile extends MapFile {
 
-    /**
-     * A reader capable of reading and caching blocks of the data file.
-     */
     static class HbaseReader extends MapFile.Reader {
 
-      private final boolean blockCacheEnabled;
-
       /**
        * @param fs
        * @param dirName
@@ -621,23 +594,7 @@
        */
       public HbaseReader(FileSystem fs, String dirName, Configuration conf)
       throws IOException {
-        this(fs, dirName, conf, false);
-      }
-
-      /**
-       * @param fs
-       * @param dirName
-       * @param conf
-       * @param blockCacheEnabled
-       * @throws IOException
-       */
-      public HbaseReader(FileSystem fs, String dirName, Configuration conf,
-          boolean blockCacheEnabled)
-      throws IOException {
-        super(fs, dirName, null, conf, false); // defer opening streams
-        this.blockCacheEnabled = blockCacheEnabled;
-        open(fs, dirName, null, conf);
-
+        super(fs, dirName, conf);
         // Force reading of the mapfile index by calling midKey.
         // Reading the index will bring the index into memory over
        // here on the client and then close the index file freeing
@@ -648,28 +605,6 @@
        // using up datanode resources. See HADOOP-2341.
        midKey();
      }
-
-      @Override
-      protected org.apache.hadoop.io.SequenceFile.Reader createDataFileReader(
-          FileSystem fs, Path dataFile, Configuration conf)
-      throws IOException {
-        if (!blockCacheEnabled) {
-          return super.createDataFileReader(fs, dataFile, conf);
-        }
-        LOG.info("Block Cache enabled");
-        final int blockSize = conf.getInt("hbase.hstore.blockCache.blockSize",
-          64 * 1024);
-        return new SequenceFile.Reader(fs, dataFile, conf) {
-          @Override
-          protected FSDataInputStream openFile(FileSystem fs, Path file,
-              int bufferSize, long length) throws IOException {
-
-            return new FSDataInputStream(new BlockFSInputStream(
-              super.openFile(fs, file, bufferSize, length), length,
-              blockSize));
-          }
-        };
-      }
    }
 
    static class HbaseWriter extends MapFile.Writer {
@@ -718,13 +653,6 @@
        bloomFilter = filter;
      }
 
-      public Reader(FileSystem fs, String dirName, Configuration conf,
-          final Filter filter, final boolean blockCacheEnabled)
-      throws IOException {
-        super(fs, dirName, conf, blockCacheEnabled);
-        bloomFilter = filter;
-      }
-
      /** {@inheritDoc} */
      @Override
      public Writable get(WritableComparable key, Writable val)
@@ -817,7 +745,7 @@
        final Configuration conf, final Range r,
        final WritableComparable midKey)
    throws IOException {
-      this(fs, dirName, conf, r, midKey, null, false);
+      this(fs, dirName, conf, r, midKey, null);
    }
 
    HalfMapFileReader(final FileSystem fs, final String dirName,
@@ -829,16 +757,6 @@
      midkey = midKey;
    }
 
-    HalfMapFileReader(final FileSystem fs, final String dirName,
-        final Configuration conf, final Range r,
-        final WritableComparable midKey, final Filter filter,
-        final boolean blockCacheEnabled)
-    throws IOException {
-      super(fs, dirName, conf, filter, blockCacheEnabled);
-      top = isTopFileRegion(r);
-      midkey = midKey;
-    }
-
    @SuppressWarnings("unchecked")
    private void checkKey(final WritableComparable key)
    throws IOException {
HTableDescriptor.java
@@ -43,15 +43,15 @@ public class HTableDescriptor implements WritableComparable {
   public static final HTableDescriptor rootTableDesc =
     new HTableDescriptor(HConstants.ROOT_TABLE_NAME,
       new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
-        HColumnDescriptor.CompressionType.NONE, false, false,
-        Integer.MAX_VALUE, null));
+        HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
+        null));
 
   /** table descriptor for meta table */
   public static final HTableDescriptor metaTableDesc =
     new HTableDescriptor(HConstants.META_TABLE_NAME,
       new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
-        HColumnDescriptor.CompressionType.NONE, false, false,
-        Integer.MAX_VALUE, null));
+        HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
+        null));
 
   private boolean rootregion;
   private boolean metaregion;
AlterCommand.java
@@ -202,8 +202,6 @@ public class AlterCommand extends SchemaModificationCommand {
         .get(spec)).toUpperCase());
     } else if (spec.equals("IN_MEMORY")) {
       inMemory = (Boolean) columnSpec.get(spec);
-    } else if (spec.equals("BLOCK_CACHE_ENABLED")) {
-      blockCacheEnabled = (Boolean) columnSpec.get(spec);
     } else if (spec.equals("BLOOMFILTER")) {
       bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
         .toUpperCase());
@@ -231,8 +229,7 @@
     column = appendDelimiter(column);
 
     HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
-      maxVersions, compression, inMemory, blockCacheEnabled,
-      maxLength, bloomFilterDesc);
+      maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
 
     return columnDesc;
   }
@@ -246,7 +243,6 @@
     maxLength = original.getMaxValueLength();
     compression = original.getCompression();
     inMemory = original.isInMemory();
-    blockCacheEnabled = original.isBlockCacheEnabled();
     bloomFilterDesc = original.getBloomFilter();
   }
 }
SchemaModificationCommand.java
@@ -37,7 +37,6 @@ public abstract class SchemaModificationCommand extends BasicCommand {
   protected int maxLength;
   protected HColumnDescriptor.CompressionType compression;
   protected boolean inMemory;
-  protected boolean blockCacheEnabled;
   protected BloomFilterDescriptor bloomFilterDesc;
   protected BloomFilterType bloomFilterType;
   protected int vectorSize;
@@ -53,7 +52,6 @@
     maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
     compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;
     inMemory = HColumnDescriptor.DEFAULT_IN_MEMORY;
-    blockCacheEnabled = HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED;
     bloomFilterDesc = HColumnDescriptor.DEFAULT_BLOOM_FILTER_DESCRIPTOR;
   }
 
@@ -78,8 +76,6 @@
       .valueOf(((String) columnSpec.get(spec)).toUpperCase());
     } else if (spec.equals("IN_MEMORY")) {
       inMemory = (Boolean) columnSpec.get(spec);
-    } else if (spec.equals("BLOCK_CACHE_ENABLED")) {
-      blockCacheEnabled = (Boolean) columnSpec.get(spec);
     } else if (spec.equals("BLOOMFILTER")) {
       bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
         .toUpperCase());
@@ -107,8 +103,7 @@
     column = appendDelimiter(column);
 
     HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
-      maxVersions, compression, inMemory, blockCacheEnabled,
-      maxLength, bloomFilterDesc);
+      maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
 
     return columnDesc;
   }
HBaseTestCase.java
@@ -184,11 +184,11 @@ public abstract class HBaseTestCase extends TestCase {
       final int versions) {
     HTableDescriptor htd = new HTableDescriptor(name);
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME1), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME2), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME3), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     return htd;
   }
 
TestBloomFilters.java
@@ -169,7 +169,6 @@ public class TestBloomFilters extends HBaseClusterTestCase {
       1,                                        // Max versions
       HColumnDescriptor.CompressionType.NONE,   // no compression
       HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
-      HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
       HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
       bloomFilter
     )
@@ -235,7 +234,6 @@
       1,                                        // Max versions
       HColumnDescriptor.CompressionType.NONE,   // no compression
       HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
-      HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
       HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
       bloomFilter
     )
TestTimestamp.java
@@ -337,7 +337,7 @@ public class TestTimestamp extends HBaseTestCase {
   private HRegion createRegion() throws IOException {
     HTableDescriptor htd = createTableDescriptor(getName());
     htd.addFamily(new HColumnDescriptor(COLUMN, VERSIONS,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     return createNewHRegion(htd, null, null);
   }
 }
TestToString.java
@@ -44,9 +44,8 @@ public class TestToString extends TestCase {
     HTableDescriptor htd = HTableDescriptor.rootTableDesc;
     System.out.println(htd.toString());
     assertEquals("Table descriptor", "name: -ROOT-, families: {info:={name: " +
-      "info, max versions: 1, compression: NONE, in memory: false, " +
-      "block cache enabled: false, max length: 2147483647, " +
-      "bloom filter: none}}", htd.toString());
+      "info, max versions: 1, compression: NONE, in memory: false, max " +
+      "length: 2147483647, bloom filter: none}}", htd.toString());
   }
 
   /**