HBASE-1164 Remove HBase private copy of SequenceFile (revert changes)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@739578 13f79535-47bb-0310-9956-ffa450edef68
parent 4d56aebba2
commit 0ce43735d1
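In effect this revert restores the HBase-private SequenceFile: each hunk below swaps the stock org.apache.hadoop.io.SequenceFile import back for the in-tree org.apache.hadoop.hbase.io copy. A minimal sketch of the resulting import pattern (the class name and field are illustrative, not part of the patch):

    // After the revert, callers bind to the HBase-private copy rather than
    // the stock Hadoop class (org.apache.hadoop.io.SequenceFile).
    import org.apache.hadoop.hbase.io.SequenceFile;
    import org.apache.hadoop.hbase.io.SequenceFile.CompressionType;

    public class ImportSwapExample {
      // With the nested enum imported directly, call sites shorten from
      // SequenceFile.CompressionType.BLOCK to CompressionType.BLOCK, as in
      // the MapFile hunks below.
      static final CompressionType INDEX_COMPRESSION = CompressionType.BLOCK;

      // The simple name now resolves to the private copy's reader type.
      static SequenceFile.Reader reader;
    }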
@@ -33,7 +33,6 @@ Release 0.20.0 - Unreleased
    HBASE-5121 Fix shell usage for format.width
    HBASE-845  HCM.isTableEnabled doesn't really tell if it is, or not
    HBASE-903  [shell] Can't set table descriptor attributes when I alter a table
-   HBASE-1164 Remove HBase private copy of SequenceFile
    HBASE-1166 saveVersion.sh doesn't work with git (Nitay Joffe via Stack)
    HBASE-1167 JSP doesn't work in a git checkout (Nitay Joffe via Andrew
               Purtell)
@@ -28,10 +28,10 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.util.Hash;
+import org.apache.hadoop.hbase.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.onelab.filter.BloomFilter;
@@ -25,7 +25,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.io.Writable;
@@ -89,7 +88,7 @@ public class HBaseMapFile extends MapFile {
   }
 
   @Override
-  protected SequenceFile.Reader createDataFileReader(
+  protected org.apache.hadoop.hbase.io.SequenceFile.Reader createDataFileReader(
       FileSystem fs, Path dataFile, Configuration conf)
   throws IOException {
     if (!blockCacheEnabled) {
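The override above spells out the fully qualified return type because the preceding import hunk removes org.apache.hadoop.io.SequenceFile from this file without adding a replacement, so no SequenceFile is in scope by its simple name. A hedged, self-contained illustration of the same idiom (ExampleReader and openReader are hypothetical names; the constructor call assumes the private copy keeps the Reader(FileSystem, Path, Configuration) constructor of its Hadoop ancestor):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ExampleReader {
      // No SequenceFile import: the private copy's nested Reader is named in full.
      protected org.apache.hadoop.hbase.io.SequenceFile.Reader openReader(
          FileSystem fs, Path dataFile, Configuration conf) throws IOException {
        // Assumes the forked class keeps the Reader(fs, file, conf)
        // constructor of its org.apache.hadoop.io ancestor.
        return new org.apache.hadoop.hbase.io.SequenceFile.Reader(fs, dataFile, conf);
      }
    }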
@@ -24,9 +24,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.hbase.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
@@ -52,7 +52,7 @@ import org.apache.hadoop.io.compress.DefaultCodec;
  * database, perform updates by copying the previous version of a database and
  * merging in a sorted change list, to create a new version of the database in
  * a new file. Sorting large change lists can be done with {@link
- * org.apache.hadoop.io.SequenceFile.Sorter}.
+ * SequenceFile.Sorter}.
  */
 public class MapFile {
   private static final Log LOG = LogFactory.getLog(MapFile.class);
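The javadoc above points change-list sorting at SequenceFile.Sorter; a minimal usage sketch, assuming the private copy preserves the Sorter API of its Hadoop ancestor (both paths and the key/value types are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.SequenceFile;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;

    public class SortChangeList {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Sort an unsorted change list so it can be merged into a MapFile.
        SequenceFile.Sorter sorter =
            new SequenceFile.Sorter(fs, Text.class, LongWritable.class, conf);
        sorter.sort(new Path("/tmp/changes.unsorted"),  // placeholder input
            new Path("/tmp/changes.sorted"));           // placeholder output
      }
    }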
@@ -95,7 +95,7 @@ public class MapFile {
     /** Create the named map for keys of the named class. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class<? extends WritableComparable> keyClass, Class valClass,
-                  SequenceFile.CompressionType compress, Progressable progress)
+                  CompressionType compress, Progressable progress)
       throws IOException {
       this(conf, fs, dirName, WritableComparator.get(keyClass), valClass,
            compress, progress);
@@ -104,7 +104,7 @@ public class MapFile {
     /** Create the named map for keys of the named class. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class<? extends WritableComparable> keyClass, Class valClass,
-                  SequenceFile.CompressionType compress, CompressionCodec codec,
+                  CompressionType compress, CompressionCodec codec,
                   Progressable progress)
       throws IOException {
       this(conf, fs, dirName, WritableComparator.get(keyClass), valClass,
@@ -114,7 +114,7 @@ public class MapFile {
     /** Create the named map for keys of the named class. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class<? extends WritableComparable> keyClass, Class valClass,
-                  SequenceFile.CompressionType compress)
+                  CompressionType compress)
       throws IOException {
       this(conf, fs, dirName, WritableComparator.get(keyClass), valClass, compress);
     }
@@ -168,7 +168,7 @@ public class MapFile {
       this.index =
         SequenceFile.createWriter
           (fs, conf, indexFile, keyClass, LongWritable.class,
-           SequenceFile.CompressionType.BLOCK, progress);
+           CompressionType.BLOCK, progress);
     }
 
     /** The number of entries that are added before an index entry is added.*/
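Taken together, the Writer constructors above now take the directly-imported CompressionType. A minimal sketch of calling the three-argument compression form shown in the last constructor hunk (the path and key/value types are placeholders, and append/close assume the fork keeps MapFile.Writer's standard API):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hbase.io.MapFile;
    import org.apache.hadoop.hbase.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;

    public class MapFileWriterExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Matches the Writer(conf, fs, dirName, keyClass, valClass, compress)
        // signature from the hunk above; "/tmp/example-map" is a placeholder.
        MapFile.Writer writer = new MapFile.Writer(conf, fs, "/tmp/example-map",
            Text.class, LongWritable.class, CompressionType.BLOCK);
        writer.append(new Text("row"), new LongWritable(1L));
        writer.close();
      }
    }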
File diff suppressed because it is too large
@@ -43,7 +43,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -59,6 +58,7 @@ import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.io.MapFile;
+import org.apache.hadoop.hbase.io.SequenceFile;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.StringUtils;
 
@@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -39,6 +38,7 @@ import org.apache.hadoop.hbase.io.HalfMapFileReader;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.io.MapFile;
+import org.apache.hadoop.hbase.io.SequenceFile;
 
 /**
  * A HStore data file. HStores usually have one or more of these files. They
@@ -24,7 +24,6 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -32,6 +31,7 @@ import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.HalfMapFileReader;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.MapFile;
+import org.apache.hadoop.hbase.io.SequenceFile;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.MiniDFSCluster;