HBASE-8054 HBASE-7797 Use consistent package name dregs

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1455672 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-03-12 19:07:34 +00:00
parent 079f99412c
commit 39a02611b5
83 changed files with 252 additions and 249 deletions

@@ -41,7 +41,7 @@ public enum DataBlockEncoding {
 FAST_DIFF(4, "org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"),
 // id 5 is reserved for the COPY_KEY algorithm for benchmarking
 // COPY_KEY(5, "org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder"),
-PREFIX_TREE(6, "org.apache.hbase.codec.prefixtree.PrefixTreeCodec");
+PREFIX_TREE(6, "org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
 private final short id;
 private final byte[] idInBytes;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree;
+package org.apache.hadoop.hbase.codec.prefixtree;
 import java.io.IOException;
 import java.io.InputStream;
@@ -24,10 +24,10 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
-import org.apache.hbase.util.vint.UVIntTool;
-import org.apache.hbase.util.vint.UVLongTool;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
+import org.apache.hadoop.hbase.util.vint.UVLongTool;
 /**
 * Information about the block. Stored at the beginning of the byte[]. Contains things
@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree;
+package org.apache.hadoop.hbase.codec.prefixtree;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
@@ -29,6 +29,11 @@ import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.KeyValue.MetaKeyComparator;
 import org.apache.hadoop.hbase.KeyValue.RootKeyComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.EncoderFactory;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -39,11 +44,6 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.io.RawComparator;
-import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
-import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
-import org.apache.hbase.codec.prefixtree.encode.EncoderFactory;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 /**
 * This class is created via reflection in DataBlockEncoding enum. Update the enum if class name or
@@ -52,7 +52,7 @@ import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 * PrefixTreeDataBlockEncoder implementation of DataBlockEncoder. This is the primary entry point
 * for PrefixTree encoding and decoding. Encoding is delegated to instances of
 * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
-* {@link org.apache.hbase.codec.prefixtree.scanner.CellSearcher}. Encoder and decoder instances are
+* {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}. Encoder and decoder instances are
 * created and recycled by static PtEncoderFactory and PtDecoderFactory.
 */
 @InterfaceAudience.Private
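
The javadoc above notes that this class is created via reflection from the DataBlockEncoding enum, so the fully qualified class name stored in the enum (first hunk of this commit) must track the package declaration exactly. A minimal hedged sketch of such a reflective lookup, assuming a public no-arg constructor; the actual HBase factory code may differ:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;

public final class CodecLookupSketch {
  // Hedged sketch, not the actual HBase code: instantiate a codec from the
  // fully qualified class name stored in a DataBlockEncoding enum entry.
  public static DataBlockEncoder create(String fqcn) throws Exception {
    // If the enum string and the real package drift apart (the mismatch this
    // rename commit cleans up), Class.forName throws ClassNotFoundException.
    return (DataBlockEncoder) Class.forName(fqcn)
        .getDeclaredConstructor().newInstance();
  }
}

For example, create("org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec") only succeeds once the class actually lives in that package, which is what the hunks above guarantee.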

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree;
+package org.apache.hadoop.hbase.codec.prefixtree;
 import java.nio.ByteBuffer;
@@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
-import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
-import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
 /**
 * These methods have the same definition as any implementation of the EncodedSeeker.

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import java.nio.ByteBuffer;
 import java.util.Queue;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 /**
 * Static wrapper class for the ArraySearcherPool.

@@ -16,11 +16,11 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.scanner.ReversibleCellScanner;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.ReversibleCellScanner;
 /**
 * Methods for going backwards through a PrefixTree block. This class is split out on its own to

@@ -16,17 +16,17 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.decode.column.ColumnReader;
-import org.apache.hbase.codec.prefixtree.decode.row.RowNodeReader;
-import org.apache.hbase.codec.prefixtree.decode.timestamp.MvccVersionDecoder;
-import org.apache.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.column.ColumnReader;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.MvccVersionDecoder;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
 /**
 * Extends PtCell and manipulates its protected fields. Could alternatively contain a PtCell and

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import com.google.common.primitives.UnsignedBytes;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode;
+package org.apache.hadoop.hbase.codec.prefixtree.decode;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode.column;
+package org.apache.hadoop.hbase.codec.prefixtree.decode.column;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.util.vint.UFIntTool;
-import org.apache.hbase.util.vint.UVIntTool;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
 @InterfaceAudience.Private
 public class ColumnNodeReader {

@@ -16,10 +16,10 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode.column;
+package org.apache.hadoop.hbase.codec.prefixtree.decode.column;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 /**
 * Position one of these appropriately in the data block and you can call its methods to retrieve

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode.row;
+package org.apache.hadoop.hbase.codec.prefixtree.decode.row;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.util.vint.UFIntTool;
-import org.apache.hbase.util.vint.UVIntTool;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
 /**
 * Position one of these appropriately in the data block and you can call its methods to retrieve

@@ -16,11 +16,11 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode.timestamp;
+package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 /**
 * Given a block and its blockMeta, this will decode the MvccVersion for the i-th Cell in the block.

@@ -16,11 +16,11 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.decode.timestamp;
+package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 /**
 * Given a block and its blockMeta, this will decode the timestamp for the i-th Cell in the block.

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode;
+package org.apache.hadoop.hbase.codec.prefixtree.encode;
 import java.io.OutputStream;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode;
+package org.apache.hadoop.hbase.codec.prefixtree.encode;
 import java.io.OutputStream;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode;
+package org.apache.hadoop.hbase.codec.prefixtree.encode;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -27,20 +27,20 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.CellTypeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.row.RowSectionWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.ArrayUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
+import org.apache.hadoop.hbase.util.byterange.ByteRangeSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeHashSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
-import org.apache.hbase.codec.prefixtree.encode.other.CellTypeEncoder;
-import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
-import org.apache.hbase.codec.prefixtree.encode.row.RowSectionWriter;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
-import org.apache.hbase.util.byterange.ByteRangeSet;
-import org.apache.hbase.util.byterange.impl.ByteRangeHashSet;
-import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
-import org.apache.hbase.util.vint.UFIntTool;
 /**
 * This is the primary class for converting a CellOutputStream into an encoded byte[]. As Cells are

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode;
+package org.apache.hadoop.hbase.codec.prefixtree.encode;
 import java.io.OutputStream;

@@ -16,19 +16,19 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.column;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.column;
 import java.io.IOException;
 import java.io.OutputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.util.vint.UFIntTool;
-import org.apache.hbase.util.vint.UVIntTool;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
 /**
 * Column nodes can be either family nodes or qualifier nodes, as both sections encode similarly.

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.column;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.column;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -24,11 +24,11 @@ import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.CollectionUtils;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.other;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.other;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.other;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.other;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -27,7 +27,7 @@ import java.util.HashSet;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ArrayUtils;
 import org.apache.hadoop.hbase.util.CollectionUtils;
-import org.apache.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import com.google.common.base.Joiner;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.row;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.row;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -25,13 +25,13 @@ import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.ByteRangeTool;
 import org.apache.hadoop.hbase.util.CollectionUtils;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.util.vint.UFIntTool;
-import org.apache.hbase.util.vint.UVIntTool;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
 /**
 * Serializes the fields comprising one node of the row trie, which can be a branch, nub, or leaf.

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.row;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.row;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -24,10 +24,10 @@ import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.util.vint.UFIntTool;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.tokenize;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
 import java.util.Comparator;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.tokenize;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
 import java.util.ArrayList;
 import java.util.List;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.tokenize;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
 import java.util.ArrayList;
 import java.util.List;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.tokenize;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.encode.tokenize;
+package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
 import org.apache.hadoop.classification.InterfaceAudience;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.scanner;
+package org.apache.hadoop.hbase.codec.prefixtree.scanner;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.scanner;
+package org.apache.hadoop.hbase.codec.prefixtree.scanner;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.scanner;
+package org.apache.hadoop.hbase.codec.prefixtree.scanner;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellScanner;
@@ -38,7 +38,7 @@ public interface ReversibleCellScanner extends CellScanner {
 * Cell.<br/>
 * false if there were no previous cells, meaning getCurrentCell() will return null.
 * Scanner position will be
-* {@link org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
+* {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
 */
 boolean previous();
@@ -49,7 +49,7 @@ public interface ReversibleCellScanner extends CellScanner {
 * Cell.<br/>
 * false if there were no previous cells, meaning getCurrentCell() will return null.
 * Scanner position will be
-* {@link org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
+* {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
 */
 boolean previousRow(boolean endOfRow);
 }

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.byterange;
+package org.apache.hadoop.hbase.util.byterange;
 import java.util.ArrayList;
 import java.util.List;
@@ -33,8 +33,8 @@ import com.google.common.collect.Lists;
 * Performance oriented class for de-duping and storing arbitrary byte[]'s arriving in non-sorted
 * order. Appends individual byte[]'s to a single big byte[] to avoid overhead and garbage.
 * <p>
-* Current implementations are {@link org.apache.hbase.util.byterange.impl.ByteRangeHashSet} and
-* {@link org.apache.hbase.util.byterange.impl.ByteRangeTreeSet}, but other options might be a
+* Current implementations are {@link org.apache.hadoop.hbase.util.byterange.impl.ByteRangeHashSet} and
+* {@link org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet}, but other options might be a
 * trie-oriented ByteRangeTrieSet, etc
 */
 @InterfaceAudience.Private
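
The javadoc above describes the de-duping idea: append each distinct byte[] once to a single large backing array and remember an (offset, length) pair per entry, avoiding per-element object overhead and garbage. A hedged sketch of that technique, not HBase's actual ByteRangeSet API:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class ByteRangeSetSketch {
  private byte[] backing = new byte[64]; // single big byte[] holding all entries
  private int used = 0;
  // Keyed on a String view of the bytes purely for brevity in this sketch;
  // a real implementation would hash the raw bytes to avoid the copy.
  private final Map<String, int[]> ranges = new HashMap<>();

  /** Returns the offset of the (de-duped) bytes inside the backing array. */
  public int add(byte[] bytes) {
    String key = new String(bytes, StandardCharsets.ISO_8859_1);
    int[] range = ranges.get(key);
    if (range == null) { // first occurrence: append to the backing array
      while (used + bytes.length > backing.length) {
        backing = Arrays.copyOf(backing, backing.length * 2);
      }
      System.arraycopy(bytes, 0, backing, used, bytes.length);
      range = new int[] { used, bytes.length };
      ranges.put(key, range);
      used += bytes.length;
    }
    return range[0];
  }
}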

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.byterange.impl;
+package org.apache.hadoop.hbase.util.byterange.impl;
 import java.util.Collections;
 import java.util.HashMap;
@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.IterableUtils;
-import org.apache.hbase.util.byterange.ByteRangeSet;
+import org.apache.hadoop.hbase.util.byterange.ByteRangeSet;
 /**
 * This is probably the best implementation of ByteRangeSet at the moment, though a HashMap produces

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.byterange.impl;
+package org.apache.hadoop.hbase.util.byterange.impl;
 import java.util.List;
 import java.util.TreeMap;
@@ -25,7 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.IterableUtils;
-import org.apache.hbase.util.byterange.ByteRangeSet;
+import org.apache.hadoop.hbase.util.byterange.ByteRangeSet;
 /**
 * Not currently used in production, but here as a benchmark comparison against ByteRangeHashSet.

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.IOException;
 import java.io.OutputStream;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.IOException;
 import java.io.InputStream;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.IOException;
 import java.io.InputStream;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.keyvalue;
+package org.apache.hadoop.hbase.codec.keyvalue;
 import java.nio.ByteBuffer;
 import java.util.Collection;
@@ -24,7 +24,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
-import org.apache.hbase.codec.prefixtree.row.TestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.row.TestRowData;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree;
+package org.apache.hadoop.hbase.codec.prefixtree;
 import org.apache.hadoop.hbase.util.Bytes;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.blockmeta;
+package org.apache.hadoop.hbase.codec.prefixtree.blockmeta;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.junit.Assert;
 import org.junit.Test;

@@ -16,16 +16,16 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.builder;
+package org.apache.hadoop.hbase.codec.prefixtree.builder;
 import java.util.Collection;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerRowSearchResult;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerRowSearchResult;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.builder;
+package org.apache.hadoop.hbase.codec.prefixtree.builder;
 import java.util.Collection;
 import java.util.List;
-import org.apache.hbase.codec.prefixtree.builder.data.TestTokenizerDataBasic;
-import org.apache.hbase.codec.prefixtree.builder.data.TestTokenizerDataEdgeCase;
+import org.apache.hadoop.hbase.codec.prefixtree.builder.data.TestTokenizerDataBasic;
+import org.apache.hadoop.hbase.codec.prefixtree.builder.data.TestTokenizerDataEdgeCase;
 import com.google.common.collect.Lists;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.builder;
+package org.apache.hadoop.hbase.codec.prefixtree.builder;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
 import org.junit.Assert;
 import org.junit.Test;
 import org.mortbay.log.Log;

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.builder.data;
+package org.apache.hadoop.hbase.codec.prefixtree.builder.data;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.builder.TestTokenizerData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.builder.TestTokenizerData;
 import com.google.common.collect.Lists;

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.builder.data;
+package org.apache.hadoop.hbase.codec.prefixtree.builder.data;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.builder.TestTokenizerData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.builder.TestTokenizerData;
 import com.google.common.collect.Lists;

@@ -16,22 +16,22 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.column;
+package org.apache.hadoop.hbase.codec.prefixtree.column;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.column.ColumnReader;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.ByteRangeTool;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.decode.column.ColumnReader;
-import org.apache.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.column;
+package org.apache.hadoop.hbase.codec.prefixtree.column;
 import java.util.Collection;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.column.data.TestColumnDataRandom;
+import org.apache.hadoop.hbase.codec.prefixtree.column.data.TestColumnDataSimple;
 import org.apache.hadoop.hbase.util.ByteRange;
-import org.apache.hbase.codec.prefixtree.column.data.TestColumnDataRandom;
-import org.apache.hbase.codec.prefixtree.column.data.TestColumnDataSimple;
 import com.google.common.collect.Lists;

@@ -16,16 +16,16 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.column.data;
+package org.apache.hadoop.hbase.codec.prefixtree.column.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.column.TestColumnData;
 import org.apache.hadoop.hbase.util.ByteRange;
+import org.apache.hadoop.hbase.util.byterange.ByteRangeSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
 import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
-import org.apache.hbase.codec.prefixtree.column.TestColumnData;
-import org.apache.hbase.util.byterange.ByteRangeSet;
-import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.column.data;
+package org.apache.hadoop.hbase.codec.prefixtree.column.data;
 import java.util.List;
+import org.apache.hadoop.hbase.codec.prefixtree.column.TestColumnData;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.ByteRangeTool;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.column.TestColumnData;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row;
+package org.apache.hadoop.hbase.codec.prefixtree.row;
 import java.util.List;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row;
+package org.apache.hadoop.hbase.codec.prefixtree.row;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -28,11 +28,11 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.util.CollectionUtils;
-import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -16,29 +16,29 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row;
+package org.apache.hadoop.hbase.codec.prefixtree.row;
 import java.util.Collection;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataComplexQualifiers;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataDeeper;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataDifferentTimestamps;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataEmpty;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataExerciseFInts;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataNub;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataNumberStrings;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataQualifierByteOrdering;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValues;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataSearcherRowMiss;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataSimple;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataSingleQualifier;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataTrivial;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataUrls;
-import org.apache.hbase.codec.prefixtree.row.data.TestRowDataUrlsExample;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataComplexQualifiers;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataDeeper;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataDifferentTimestamps;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataEmpty;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataExerciseFInts;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataNub;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataNumberStrings;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataQualifierByteOrdering;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValues;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSearcherRowMiss;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSimple;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSingleQualifier;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataTrivial;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrls;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrlsExample;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row;
+package org.apache.hadoop.hbase.codec.prefixtree.row;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -28,10 +28,10 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,16 +16,16 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,18 +16,18 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.Collections;
 import java.util.List;
@@ -24,8 +24,8 @@ import java.util.List;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,13 +16,13 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,17 +16,17 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,18 +16,18 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CollectionUtils;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,16 +16,16 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
 import org.junit.Assert;
 import com.google.common.collect.Lists;

@@ -16,17 +16,17 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.PrefixTreeTestConstants;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
-import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
+import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
 import com.google.common.collect.Lists;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.row.data;
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -24,12 +24,12 @@ import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnNodeWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.row.RowNodeWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hbase.codec.prefixtree.encode.column.ColumnNodeWriter;
-import org.apache.hbase.codec.prefixtree.encode.row.RowNodeWriter;
-import org.apache.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
-import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.timestamp;
+package org.apache.hadoop.hbase.codec.prefixtree.timestamp;
 import java.util.Collection;
 import java.util.List;
-import org.apache.hbase.codec.prefixtree.timestamp.data.TestTimestampDataBasic;
-import org.apache.hbase.codec.prefixtree.timestamp.data.TestTimestampDataNumbers;
-import org.apache.hbase.codec.prefixtree.timestamp.data.TestTimestampDataRepeats;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataBasic;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataNumbers;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataRepeats;
 import com.google.common.collect.Lists;

@@ -16,14 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.timestamp;
+package org.apache.hadoop.hbase.codec.prefixtree.timestamp;
 import java.io.IOException;
 import java.util.Collection;
-import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
-import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.timestamp.data;
+package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hbase.codec.prefixtree.timestamp.TestTimestampData;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
 public class TestTimestampDataBasic implements TestTimestampData {

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.timestamp.data;
+package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hbase.codec.prefixtree.timestamp.TestTimestampData;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
 public class TestTimestampDataNumbers implements TestTimestampData {

@@ -16,12 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.codec.prefixtree.timestamp.data;
+package org.apache.hadoop.hbase.codec.prefixtree.timestamp.data;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hbase.codec.prefixtree.timestamp.TestTimestampData;
+import org.apache.hadoop.hbase.codec.prefixtree.timestamp.TestTimestampData;
 public class TestTimestampDataRepeats implements TestTimestampData {

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.bytes;
+package org.apache.hadoop.hbase.util.bytes;
 import junit.framework.Assert;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.comparator;
+package org.apache.hadoop.hbase.util.comparator;
 import java.util.Comparator;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.number;
+package org.apache.hadoop.hbase.util.number;
 import java.text.DecimalFormat;

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.number;
+package org.apache.hadoop.hbase.util.number;
 import java.util.Random;

@@ -16,11 +16,12 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import org.junit.Assert;
 import org.junit.Test;

@@ -16,13 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Random;
+import org.apache.hadoop.hbase.util.vint.UVIntTool;
 import org.junit.Assert;
 import org.junit.Test;

@@ -16,13 +16,14 @@
 * limitations under the License.
 */
-package org.apache.hbase.util.vint;
+package org.apache.hadoop.hbase.util.vint;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.util.Random;
-import org.apache.hbase.util.number.RandomNumberUtils;
+import org.apache.hadoop.hbase.util.number.RandomNumberUtils;
+import org.apache.hadoop.hbase.util.vint.UVLongTool;
 import org.junit.Assert;
 import org.junit.Test;