HBASE-7797 Use consistent package name

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1450046 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2013-02-26 06:28:06 +00:00
parent 2fa525feea
commit aee5574a5a
57 changed files with 87 additions and 163 deletions

View File

@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import java.util.ArrayList;
import java.util.Arrays;

View File

@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -25,6 +25,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hbase.Cell;
import java.io.Closeable;
import java.io.IOException;

View File

@ -25,12 +25,12 @@ import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
/**
* Used to perform Increment operations on a single row.

View File

@ -21,15 +21,15 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hbase.Cell;
import org.apache.hbase.CellScannable;
import org.apache.hbase.CellScanner;
import org.apache.hbase.CellUtil;
import java.util.ArrayList;
import java.util.HashMap;

View File

@ -28,13 +28,13 @@ import java.util.TreeMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hbase.Cell;
/**
* Used to perform Put operations for a single row.

View File

@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.SplitKeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellScannable;
import org.apache.hbase.CellScanner;
import org.apache.hbase.CellUtil;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

View File

@ -26,6 +26,8 @@ import com.google.protobuf.Message;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
@ -104,7 +106,6 @@ import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hbase.Cell;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hbase;
package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hbase;
package org.apache.hadoop.hbase;
import java.io.Serializable;
import java.util.Comparator;

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase;
package org.apache.hadoop.hbase;
/**
* Implementer can return a CellScanner over its Cell content.

View File

@ -16,11 +16,11 @@
* limitations under the License.
*/
package org.apache.hbase;
package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hbase.Cell;
import org.apache.hadoop.hbase.Cell;
/**
* An interface for iterating through a sequence of cells. Similar to Java's Iterator, but without
@ -43,8 +43,8 @@ import org.apache.hbase.Cell;
* // do something
* }
* </pre>
* <p>Often used reading {@link org.apache.hbase.Cell}s written by
* {@link org.apache.hbase.io.CellOutputStream}.
* <p>Often used reading {@link org.apache.hadoop.hbase.Cell}s written by
* {@link org.apache.hadoop.hbase.io.CellOutputStream}.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hbase;
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
import java.util.Iterator;
@ -27,7 +27,6 @@ import java.util.NavigableMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.KeyValue;
/**
* Utility methods helpful slinging {@link Cell} instances.

View File

@ -39,8 +39,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.RawComparator;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import com.google.common.primitives.Longs;

View File

@ -26,7 +26,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hbase.CellComparator;
import com.google.common.collect.Lists;

View File

@ -26,8 +26,6 @@ import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hbase.Cell;
import org.apache.hbase.CellUtil;
/**
* static convenience methods for dealing with KeyValues and collections of KeyValues

View File

@ -15,12 +15,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hbase.Cell;
import org.apache.hadoop.hbase.Cell;
abstract class BaseDecoder implements Codec.Decoder {
final InputStream in;

View File

@ -15,12 +15,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.hbase.Cell;
import org.apache.hadoop.hbase.Cell;
abstract class BaseEncoder implements Codec.Encoder {
protected final OutputStream out;

View File

@ -15,16 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellUtil;
/**
* Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints

View File

@ -15,14 +15,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hbase.CellScanner;
import org.apache.hbase.io.CellOutputStream;
/**
* Encoder/Decoder for Cell.

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.IOException;

View File

@ -15,15 +15,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hbase.Cell;
/**
* Codec that does KeyValue version 1 serialization.

View File

@ -16,14 +16,14 @@
* limitations under the License.
*/
package org.apache.hbase.io;
package org.apache.hadoop.hbase.io;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hbase.Cell;
import org.apache.hbase.CellScanner;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
/**
* Accepts a stream of Cells. This can be used to build a block of cells during compactions

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import static org.junit.Assert.*;
@ -25,11 +25,13 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.codec.CellCodec;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.junit.Test;
import org.junit.experimental.categories.Category;

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase.codec;
package org.apache.hadoop.hbase.codec;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@ -29,6 +29,8 @@ import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

View File

@ -1,72 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hbase;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
public class TestCellUtil {
  @Test
  public void testCreateCellScannerCellList() {
    final int count = 3;
    // Scanner over a flat List<Cell>; it should visit each cell exactly once.
    List<Cell> cells = Arrays.asList(getCells(count, Bytes.toBytes(0)));
    CellScanner scanner = CellUtil.createCellScanner(cells);
    int seen = 0;
    while (scanner.advance()) {
      seen++;
    }
    assertEquals(count, seen);
  }
  @Test
  public void testCreateCellScannerFamilyMap() {
    final int count = 3;
    // Build a family -> cells map with `count` families of `count` cells each.
    final NavigableMap<byte [], List<? extends Cell>> map =
      new TreeMap<byte [], List<? extends Cell>>(Bytes.BYTES_COMPARATOR);
    for (int f = 0; f < count; f++) {
      byte [] family = Bytes.toBytes(f);
      map.put(family, Arrays.asList(getCells(count, family)));
    }
    // The map-backed scanner should flatten the map and visit count * count cells.
    CellScanner scanner = CellUtil.createCellScanner(map);
    int seen = 0;
    while (scanner.advance()) {
      seen++;
    }
    assertEquals(count * count, seen);
  }
  // Builds `howMany` KeyValues in the given family; row/qualifier/value are the index bytes.
  static KeyValue [] getCells(final int howMany, final byte [] family) {
    KeyValue [] result = new KeyValue[howMany];
    for (int n = 0; n < howMany; n++) {
      byte [] index = Bytes.toBytes(n);
      result[n] = new KeyValue(index, family, index, index);
    }
    return result;
  }
}

View File

@ -21,11 +21,11 @@ package org.apache.hbase.codec.prefixtree;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
import org.apache.hbase.Cell;
import org.apache.hbase.CellUtil;
import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;

View File

@ -19,9 +19,9 @@
package org.apache.hbase.codec.prefixtree.decode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.apache.hbase.CellScanner;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.decode.column.ColumnReader;
import org.apache.hbase.codec.prefixtree.decode.row.RowNodeReader;

View File

@ -19,8 +19,8 @@
package org.apache.hbase.codec.prefixtree.decode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.Cell;
import org.apache.hbase.CellUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;

View File

@ -19,10 +19,10 @@
package org.apache.hbase.codec.prefixtree.decode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
/**
* As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields

View File

@ -24,19 +24,19 @@ import java.io.OutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.CellOutputStream;
import org.apache.hadoop.hbase.util.ArrayUtils;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hbase.Cell;
import org.apache.hbase.CellUtil;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
import org.apache.hbase.codec.prefixtree.encode.other.CellTypeEncoder;
import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
import org.apache.hbase.codec.prefixtree.encode.row.RowSectionWriter;
import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
import org.apache.hbase.io.CellOutputStream;
import org.apache.hbase.util.byterange.ByteRangeSet;
import org.apache.hbase.util.byterange.impl.ByteRangeHashSet;
import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;

View File

@ -19,7 +19,7 @@
package org.apache.hbase.codec.prefixtree.scanner;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.Cell;
import org.apache.hadoop.hbase.Cell;
/**
* Methods for seeking to a random {@link Cell} inside a sorted collection of cells. Indicates that

View File

@ -19,7 +19,7 @@
package org.apache.hbase.codec.prefixtree.scanner;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.CellScanner;
import org.apache.hadoop.hbase.CellScanner;
/**
* An extension of CellScanner indicating the scanner supports iterating backwards through cells.

View File

@ -20,8 +20,8 @@ package org.apache.hbase.codec.prefixtree.row;
import java.util.List;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;

View File

@ -24,11 +24,11 @@ import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;

View File

@ -25,10 +25,10 @@ import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;

View File

@ -21,10 +21,10 @@ package org.apache.hbase.codec.prefixtree.row.data;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
import com.google.common.collect.Lists;

View File

@ -20,10 +20,10 @@ package org.apache.hbase.codec.prefixtree.row.data;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;

View File

@ -20,11 +20,11 @@ package org.apache.hbase.codec.prefixtree.row.data;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;

View File

@ -24,13 +24,13 @@ import java.util.TreeSet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;
import org.apache.hbase.Cell;
/**
* Emits sorted Puts.

View File

@ -73,6 +73,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.exceptions.DroppedSnapshotException;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -136,7 +137,6 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hbase.Cell;
import org.cliffc.high_scale_lib.Counter;
import com.google.common.base.Preconditions;

View File

@ -45,6 +45,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -78,7 +79,6 @@ import org.apache.hadoop.hbase.util.CollectionBackedScanner;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hbase.Cell;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableCollection;

View File

@ -35,6 +35,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@ -44,7 +45,6 @@ import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB.Allocation;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hbase.Cell;
/**
* The MemStore holds in-memory modifications to the Store. Modifications

View File

@ -23,6 +23,7 @@ import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Delete;
@ -32,7 +33,6 @@ import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProto
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
/**
* A <code>MultiRowProcessor</code> that performs multiple puts and deletes.

View File

@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
@ -36,7 +37,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hbase.Cell;
/**
* Interface for objects that hold a column family in a Region. Its a memstore and a set of zero or

View File

@ -41,6 +41,7 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@ -64,7 +65,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
/**
* HTable interface to remote tables accessed via REST gateway

View File

@ -33,6 +33,7 @@ import com.google.protobuf.Service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@ -71,7 +72,6 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hbase.Cell;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;

View File

@ -260,10 +260,10 @@ public class ThriftUtilities {
}
// Map<family, List<KeyValue>>
for (Map.Entry<byte[], List<? extends org.apache.hbase.Cell>> familyEntry:
for (Map.Entry<byte[], List<? extends org.apache.hadoop.hbase.Cell>> familyEntry:
in.getFamilyMap().entrySet()) {
TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
for (org.apache.hbase.Cell cell: familyEntry.getValue()) {
for (org.apache.hadoop.hbase.Cell cell: familyEntry.getValue()) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
byte[] family = kv.getFamily();
byte[] qualifier = kv.getQualifier();

View File

@ -33,6 +33,7 @@ import java.util.NavigableSet;
import com.google.common.collect.ImmutableList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
@ -52,7 +53,6 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hbase.Cell;
/**
* A sample region observer that tests the RegionObserver interface.

View File

@ -26,6 +26,7 @@ import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
@ -41,7 +42,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hbase.Cell;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

View File

@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hbase.Cell;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;

View File

@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.CellComparator;
import org.junit.Test;
import org.junit.experimental.categories.Category;

View File

@ -45,6 +45,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
@ -93,8 +95,6 @@ import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

View File

@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;

View File

@ -37,6 +37,7 @@ import java.util.TreeSet;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
@ -48,7 +49,6 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.CellComparator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -44,7 +45,6 @@ import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hbase.Cell;
/**
* This class runs performance benchmarks for {@link HLog}.

View File

@ -32,6 +32,7 @@ import java.util.NavigableSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -56,7 +57,6 @@ import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;