diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
index 7d5c476fcd1..acff1867050 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java
@@ -487,14 +487,5 @@ public class RegionInfoBuilder {
return RegionInfo.COMPARATOR.compare(this, other);
}
- /**
- * @return Comparator to use comparing {@link KeyValue}s.
- * @deprecated Use Region#getCellComparator(). deprecated for hbase 2.0, remove for hbase 3.0
- */
- @Deprecated
- public KeyValue.KVComparator getComparator() {
- return isMetaRegion()?
- KeyValue.META_COMPARATOR: KeyValue.COMPARATOR;
- }
}
}
\ No newline at end of file
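Note: with the deprecated getComparator() gone, callers pick the comparator directly, as the
deprecation note above already suggests. A minimal sketch (the `info` variable is illustrative,
not part of this patch):

    CellComparator cmp = info.isMetaRegion()
        ? CellComparator.META_COMPARATOR
        : CellComparator.COMPARATOR;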
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index a3029f86695..dc5df304044 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIMITER;
+import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIM_ARRAY;
import java.io.DataOutputStream;
import java.io.IOException;
@@ -126,6 +129,51 @@ public final class CellUtil {
return output;
}
+ /**
+ * Makes a column in family:qualifier form from separate byte arrays.
+ *
+ * Not recommended for usage as this is old-style API.
+ * @param family
+ * @param qualifier
+ * @return family:qualifier
+ */
+ public static byte [] makeColumn(byte [] family, byte [] qualifier) {
+ return Bytes.add(family, COLUMN_FAMILY_DELIM_ARRAY, qualifier);
+ }
+
+ /**
+ * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
+ * (ie, {@code fam:}) is parsed as { fam, EMPTY_BYTE_ARRAY } while no delimiter (ie,
+ * {@code fam}) is parsed as an array of one element, { fam }.
+ *
+ * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
+ *
+ *
+ * Not recommended to be used as this is old-style API.
+ *
+ * @param c The column.
+ * @return The parsed column.
+ */
+ public static byte [][] parseColumn(byte [] c) {
+ final int index = getDelimiter(c, 0, c.length, COLUMN_FAMILY_DELIMITER);
+ if (index == -1) {
+ // If no delimiter, return array of size 1
+ return new byte [][] { c };
+ } else if(index == c.length - 1) {
+ // family with empty qualifier, return array size 2
+ byte [] family = new byte[c.length-1];
+ System.arraycopy(c, 0, family, 0, family.length);
+ return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY};
+ }
+ // Family and column, return array size 2
+ final byte [][] result = new byte [2][];
+ result[0] = new byte [index];
+ System.arraycopy(c, 0, result[0], 0, index);
+ final int len = c.length - (index + 1);
+ result[1] = new byte[len];
+ System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
+ return result;
+ }
/******************** copyTo **********************************/
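The relocated helpers keep the old KeyValue semantics described in the javadoc above. A minimal
usage sketch (family/qualifier values are illustrative, not part of this patch):

    byte[] column = CellUtil.makeColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));
    byte[][] fq = CellUtil.parseColumn(column);                     // { f, q }
    byte[][] famOnly = CellUtil.parseColumn(Bytes.toBytes("f"));    // { f }
    byte[][] emptyQual = CellUtil.parseColumn(Bytes.toBytes("f:")); // { f, EMPTY_BYTE_ARRAY }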
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index ae957383422..66ff72a69cd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -23,9 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.len;
import java.io.DataInput;
import java.io.DataOutput;
-import java.io.EOFException;
import java.io.IOException;
-import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -41,11 +39,9 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
/**
* An HBase Key/Value. This is the fundamental HBase Type.
*
@@ -101,25 +97,18 @@ public class KeyValue implements ExtendedCell {
/**
* Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
* of KeyValue only.
- * @deprecated Use {@link CellComparator#COMPARATOR} instead
+ * @deprecated Use {@link CellComparator#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
*/
@Deprecated
public static final KVComparator COMPARATOR = new KVComparator();
/**
* A {@link KVComparator} for hbase:meta catalog table
* {@link KeyValue}s.
- * @deprecated Use {@link CellComparator#META_COMPARATOR} instead
+ * @deprecated Use {@link CellComparator#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
*/
@Deprecated
public static final KVComparator META_COMPARATOR = new MetaComparator();
- /**
- * Needed for Bloom Filters.
- * * @deprecated Use {@link Bytes#BYTES_RAWCOMPARATOR} instead
- */
- @Deprecated
- public static final KVComparator RAW_COMPARATOR = new RawBytesComparator();
-
/** Size of the key length field in bytes*/
public static final int KEY_LENGTH_SIZE = Bytes.SIZEOF_INT;
@@ -291,15 +280,6 @@ public class KeyValue implements ExtendedCell {
protected int offset = 0; // offset into bytes buffer KV starts at
protected int length = 0; // length of the KV starting from offset.
- /**
- * @return True if a delete type, a {@link KeyValue.Type#Delete} or
- * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
- * KeyValue type.
- */
- public static boolean isDelete(byte t) {
- return Type.Delete.getCode() <= t && t <= Type.DeleteFamily.getCode();
- }
-
/** Here be dragons **/
/**
@@ -1508,14 +1488,6 @@ public class KeyValue implements ExtendedCell {
return Bytes.toLong(this.bytes, tsOffset);
}
- /**
- * @return Type of this KeyValue.
- */
- @Deprecated
- public byte getType() {
- return getTypeByte();
- }
-
/**
* @return KeyValue.TYPE byte representation
*/
@@ -1524,16 +1496,6 @@ public class KeyValue implements ExtendedCell {
return this.bytes[this.offset + getKeyLength() - 1 + ROW_OFFSET];
}
- /**
- * @return True if a delete type, a {@link KeyValue.Type#Delete} or
- * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
- * KeyValue type.
- */
- @Deprecated // use CellUtil#isDelete
- public boolean isDelete() {
- return KeyValue.isDelete(getType());
- }
-
/**
* This returns the offset where the tag actually starts.
*/
@@ -1601,52 +1563,6 @@ public class KeyValue implements ExtendedCell {
return new KeyValue(newBuffer);
}
- /**
- * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
- * (ie, {@code fam:}) is parsed as { fam, EMPTY_BYTE_ARRAY } while no delimiter (ie,
- * {@code fam}) is parsed as an array of one element, { fam }.
- *
- * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
- *
- *
- * Not recommend to be used as this is old-style API.
- *
- * @param c The column.
- * @return The parsed column.
- */
- public static byte [][] parseColumn(byte [] c) {
- final int index = getDelimiter(c, 0, c.length, COLUMN_FAMILY_DELIMITER);
- if (index == -1) {
- // If no delimiter, return array of size 1
- return new byte [][] { c };
- } else if(index == c.length - 1) {
- // family with empty qualifier, return array size 2
- byte [] family = new byte[c.length-1];
- System.arraycopy(c, 0, family, 0, family.length);
- return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY};
- }
- // Family and column, return array size 2
- final byte [][] result = new byte [2][];
- result[0] = new byte [index];
- System.arraycopy(c, 0, result[0], 0, index);
- final int len = c.length - (index + 1);
- result[1] = new byte[len];
- System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
- return result;
- }
-
- /**
- * Makes a column in family:qualifier form from separate byte arrays.
- *
- * Not recommended for usage as this is old-style API.
- * @param family
- * @param qualifier
- * @return family:qualifier
- */
- public static byte [] makeColumn(byte [] family, byte [] qualifier) {
- return Bytes.add(family, COLUMN_FAMILY_DELIM_ARRAY, qualifier);
- }
-
/**
* @param b
* @param delimiter
@@ -1692,7 +1608,7 @@ public class KeyValue implements ExtendedCell {
/**
* A {@link KVComparator} for hbase:meta catalog table
* {@link KeyValue}s.
- * @deprecated : {@link CellComparator#META_COMPARATOR} to be used
+ * @deprecated : {@link CellComparator#META_COMPARATOR} to be used. Deprecated for hbase 2.0, remove for hbase 3.0.
*/
@Deprecated
public static class MetaComparator extends KVComparator {
@@ -1807,7 +1723,7 @@ public class KeyValue implements ExtendedCell {
* Compare KeyValues. When we compare KeyValues, we only compare the Key
* portion. This means two KeyValues with same Key but different Values are
* considered the same as far as this Comparator is concerned.
- * @deprecated : Use {@link CellComparator}.
+ * @deprecated : Use {@link CellComparator}. Deprecated for hbase 2.0, remove for hbase 3.0.
*/
@Deprecated
public static class KVComparator implements RawComparator<Cell>, SamePrefixComparator<byte[]> {
@@ -2322,40 +2238,6 @@ public class KeyValue implements ExtendedCell {
}
- /**
- * @param b
- * @return A KeyValue made of a byte array that holds the key-only part.
- * Needed to convert hfile index members to KeyValues.
- */
- public static KeyValue createKeyValueFromKey(final byte [] b) {
- return createKeyValueFromKey(b, 0, b.length);
- }
-
- /**
- * @param bb
- * @return A KeyValue made of a byte buffer that holds the key-only part.
- * Needed to convert hfile index members to KeyValues.
- */
- public static KeyValue createKeyValueFromKey(final ByteBuffer bb) {
- return createKeyValueFromKey(bb.array(), bb.arrayOffset(), bb.limit());
- }
-
- /**
- * @param b
- * @param o
- * @param l
- * @return A KeyValue made of a byte array that holds the key-only part.
- * Needed to convert hfile index members to KeyValues.
- */
- public static KeyValue createKeyValueFromKey(final byte [] b, final int o,
- final int l) {
- byte [] newb = new byte[l + ROW_OFFSET];
- System.arraycopy(b, o, newb, ROW_OFFSET, l);
- Bytes.putInt(newb, 0, l);
- Bytes.putInt(newb, Bytes.SIZEOF_INT, 0);
- return new KeyValue(newb);
- }
-
/**
* @param in Where to read bytes from. Creates a byte array to hold the KeyValue
* backing bytes copied from the stream.
@@ -2388,55 +2270,6 @@ public class KeyValue implements ExtendedCell {
return new KeyValue(bytes, 0, length);
}
- /**
- * Create a new KeyValue by copying existing cell and adding new tags
- * @param c
- * @param newTags
- * @return a new KeyValue instance with new tags
- */
- public static KeyValue cloneAndAddTags(Cell c, List<Tag> newTags) {
- List<Tag> existingTags = null;
- if(c.getTagsLength() > 0) {
- existingTags = CellUtil.getTags(c);
- existingTags.addAll(newTags);
- } else {
- existingTags = newTags;
- }
- return new KeyValue(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(),
- c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(),
- c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(),
- c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(),
- c.getValueLength(), existingTags);
- }
-
- /**
- * Create a KeyValue reading from the raw InputStream.
- * Named iscreate so doesn't clash with {@link #create(DataInput)}
- * @param in
- * @return Created KeyValue or throws an exception
- * @throws IOException
- * {@link Deprecated} As of 1.2. Use {@link KeyValueUtil#iscreate(InputStream, boolean)} instead.
- */
- @Deprecated
- public static KeyValue iscreate(final InputStream in) throws IOException {
- byte [] intBytes = new byte[Bytes.SIZEOF_INT];
- int bytesRead = 0;
- while (bytesRead < intBytes.length) {
- int n = in.read(intBytes, bytesRead, intBytes.length - bytesRead);
- if (n < 0) {
- if (bytesRead == 0) {
- throw new EOFException();
- }
- throw new IOException("Failed read of int, read " + bytesRead + " bytes");
- }
- bytesRead += n;
- }
- // TODO: perhaps some sanity check is needed here.
- byte [] bytes = new byte[Bytes.toInt(intBytes)];
- IOUtils.readFully(in, bytes, 0, bytes.length);
- return new KeyValue(bytes, 0, bytes.length);
- }
-
/**
* Write out a KeyValue in the manner in which we used to when KeyValue was a Writable.
* @param kv
@@ -2496,23 +2329,6 @@ public class KeyValue implements ExtendedCell {
ByteBufferUtils.copyFromArrayToBuffer(buf, offset, this.bytes, this.offset, this.length);
}
- /**
- * Comparator that compares row component only of a KeyValue.
- */
- public static class RowOnlyComparator implements Comparator<KeyValue> {
- final KVComparator comparator;
-
- public RowOnlyComparator(final KVComparator c) {
- this.comparator = c;
- }
-
- @Override
- public int compare(KeyValue left, KeyValue right) {
- return comparator.compareRows(left, right);
- }
- }
-
-
/**
* Avoids redundant comparisons for better performance.
*
@@ -2528,71 +2344,6 @@ public class KeyValue implements ExtendedCell {
);
}
- /**
- * @deprecated Not to be used for any comparsions
- */
- @Deprecated
- public static class RawBytesComparator extends KVComparator {
- /**
- * The HFileV2 file format's trailer contains this class name. We reinterpret this and
- * instantiate the appropriate comparator.
- * TODO: With V3 consider removing this.
- * @return legacy class name for FileFileTrailer#comparatorClassName
- */
- @Override
- public String getLegacyKeyComparatorName() {
- return "org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator";
- }
-
- /**
- * @deprecated Since 0.99.2.
- */
- @Override
- @Deprecated
- public int compareFlatKey(byte[] left, int loffset, int llength, byte[] right,
- int roffset, int rlength) {
- return Bytes.BYTES_RAWCOMPARATOR.compare(left, loffset, llength, right, roffset, rlength);
- }
-
- @Override
- public int compare(Cell left, Cell right) {
- return compareOnlyKeyPortion(left, right);
- }
-
- @Override
- @VisibleForTesting
- public int compareOnlyKeyPortion(Cell left, Cell right) {
- int c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getRowArray(), left.getRowOffset(),
- left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
- if (c != 0) {
- return c;
- }
- c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getFamilyArray(), left.getFamilyOffset(),
- left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
- right.getFamilyLength());
- if (c != 0) {
- return c;
- }
- c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getQualifierArray(), left.getQualifierOffset(),
- left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
- right.getQualifierLength());
- if (c != 0) {
- return c;
- }
- c = compareTimestamps(left.getTimestamp(), right.getTimestamp());
- if (c != 0) {
- return c;
- }
- return (0xff & left.getTypeByte()) - (0xff & right.getTypeByte());
- }
-
- @Override
- public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
- return firstKeyInBlock;
- }
-
- }
-
/**
* HeapSize implementation
*
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index 79d264faeed..74a4c8a3600 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -26,7 +26,6 @@ import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
/**
* Immutable POJO class for representing a table name.
@@ -536,19 +535,4 @@ public final class TableName implements Comparable<TableName> {
return this.nameAsString.compareTo(tableName.getNameAsString());
}
- /**
- * Get the appropriate row comparator for this table.
- *
- * @return The comparator.
- * @deprecated The comparator is an internal property of the table. Should
- * not have been exposed here
- */
- @InterfaceAudience.Private
- @Deprecated
- public KVComparator getRowComparator() {
- if(TableName.META_TABLE_NAME.equals(this)) {
- return KeyValue.META_COMPARATOR;
- }
- return KeyValue.COMPARATOR;
- }
}
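Callers of the removed getRowComparator() can apply the same meta-vs-default dispatch with
CellComparator, per the deprecation notes in KeyValue. A hedged sketch (the `tableName`
variable is illustrative):

    CellComparator cmp = TableName.META_TABLE_NAME.equals(tableName)
        ? CellComparator.META_COMPARATOR
        : CellComparator.COMPARATOR;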
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
index ffa6e243ed6..594816fcf50 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
@@ -118,7 +117,7 @@ implements TableMap<ImmutableBytesWritable,Result> {
int numCols = columns.length;
if (numCols > 0) {
for (Cell value: r.listCells()) {
- byte [] column = KeyValue.makeColumn(CellUtil.cloneFamily(value),
+ byte [] column = CellUtil.makeColumn(CellUtil.cloneFamily(value),
CellUtil.cloneQualifier(value));
for (int i = 0; i < numCols; i++) {
if (Bytes.equals(column, columns[i])) {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
index a5ac5c8a0cc..1909b2d57b3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -108,7 +107,7 @@ extends TableMapper<ImmutableBytesWritable,Result> implements Configurable {
int numCols = columns.length;
if (numCols > 0) {
for (Cell value: r.listCells()) {
- byte [] column = KeyValue.makeColumn(CellUtil.cloneFamily(value),
+ byte [] column = CellUtil.makeColumn(CellUtil.cloneFamily(value),
CellUtil.cloneQualifier(value));
for (int i = 0; i < numCols; i++) {
if (Bytes.equals(column, columns[i])) {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index b25bff596b5..9eefac9defe 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -27,7 +27,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
@@ -213,7 +213,7 @@ implements Configurable {
* @throws IllegalArgumentException When familyAndQualifier is invalid.
*/
private static void addColumn(Scan scan, byte[] familyAndQualifier) {
- byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
+ byte [][] fq = CellUtil.parseColumn(familyAndQualifier);
if (fq.length == 1) {
scan.addFamily(fq[0]);
} else if (fq.length == 2) {
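For context, the private helper above backs the space-separated column list users hand to the
job via TableInputFormat.SCAN_COLUMNS; a hedged illustration (family and qualifier values are
made up):

    Configuration conf = HBaseConfiguration.create();
    // "info" selects the whole family; "info:name" selects a single column.
    conf.set(TableInputFormat.SCAN_COLUMNS, "info info:name");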
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 199a60c0464..2ecf17f3ae4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -43,7 +43,6 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Increment;
@@ -233,7 +232,7 @@ public class RowResource extends ResourceBase {
.type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
.build();
}
- byte [][] parts = KeyValue.parseColumn(col);
+ byte [][] parts = CellUtil.parseColumn(col);
if (parts.length != 2) {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -301,7 +300,7 @@ public class RowResource extends ResourceBase {
.build();
}
Put put = new Put(row);
- byte parts[][] = KeyValue.parseColumn(column);
+ byte parts[][] = CellUtil.parseColumn(column);
if (parts.length != 2) {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -390,7 +389,7 @@ public class RowResource extends ResourceBase {
delete = new Delete(rowspec.getRow());
for (byte[] column: rowspec.getColumns()) {
- byte[][] split = KeyValue.parseColumn(column);
+ byte[][] split = CellUtil.parseColumn(column);
if (rowspec.hasTimestamp()) {
if (split.length == 1) {
delete.addFamily(split[0], rowspec.getTimestamp());
@@ -473,7 +472,7 @@ public class RowResource extends ResourceBase {
boolean retValue;
CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
byte[] valueToCheckColumn = valueToCheckCell.getColumn();
- byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
+ byte[][] valueToPutParts = CellUtil.parseColumn(valueToCheckColumn);
if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
CellModel valueToPutCell = null;
@@ -490,7 +489,7 @@ public class RowResource extends ResourceBase {
.build();
}
- byte [][] parts = KeyValue.parseColumn(col);
+ byte [][] parts = CellUtil.parseColumn(col);
if (parts.length != 2) {
return Response.status(Response.Status.BAD_REQUEST)
@@ -606,7 +605,7 @@ public class RowResource extends ResourceBase {
.build();
}
- parts = KeyValue.parseColumn(col);
+ parts = CellUtil.parseColumn(col);
if (parts.length == 1) {
// Only Column Family is specified
@@ -623,7 +622,7 @@ public class RowResource extends ResourceBase {
}
}
- parts = KeyValue.parseColumn(valueToDeleteColumn);
+ parts = CellUtil.parseColumn(valueToDeleteColumn);
if (parts.length == 2) {
if (parts[1].length != 0) {
// To support backcompat of deleting a cell
@@ -722,7 +721,7 @@ public class RowResource extends ResourceBase {
.type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
.build();
}
- byte [][] parts = KeyValue.parseColumn(col);
+ byte [][] parts = CellUtil.parseColumn(col);
if (parts.length != 2) {
servlet.getMetrics().incrementFailedAppendRequests(1);
return Response.status(Response.Status.BAD_REQUEST)
@@ -816,7 +815,7 @@ public class RowResource extends ResourceBase {
.type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
.build();
}
- byte [][] parts = KeyValue.parseColumn(col);
+ byte [][] parts = CellUtil.parseColumn(col);
if (parts.length != 2) {
servlet.getMetrics().incrementFailedIncrementRequests(1);
return Response.status(Response.Status.BAD_REQUEST)
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index 4cf8492a823..1edd73a063b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -25,10 +25,10 @@ import java.util.NoSuchElementException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
@@ -50,7 +50,7 @@ public class RowResultGenerator extends ResultGenerator {
Get get = new Get(rowspec.getRow());
if (rowspec.hasColumns()) {
for (byte[] col: rowspec.getColumns()) {
- byte[][] split = KeyValue.parseColumn(col);
+ byte[][] split = CellUtil.parseColumn(col);
if (split.length == 1) {
get.addFamily(split[0]);
} else if (split.length == 2) {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index 718bdff0513..8f5611589b4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -35,7 +35,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -175,7 +174,7 @@ public class ScannerInstanceResource extends ResourceBase {
response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
response.header("X-Column",
Base64.encodeBytes(
- KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
+ CellUtil.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
servlet.getMetrics().incrementSucessfulGetRequests(1);
return response.build();
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index 50208cf61d1..ece4f1249ba 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -25,7 +25,7 @@ import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownScannerException;
@@ -80,7 +80,7 @@ public class ScannerResultGenerator extends ResultGenerator {
if (rowspec.hasColumns()) {
byte[][] columns = rowspec.getColumns();
for (byte[] column: columns) {
- byte[][] split = KeyValue.parseColumn(column);
+ byte[][] split = CellUtil.parseColumn(column);
if (split.length == 1) {
scan.addFamily(split[0]);
} else if (split.length == 2) {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 8899044783d..b15537a88c6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -177,7 +177,7 @@ public class RemoteHTable implements Table {
for (RowModel row: model.getRows()) {
List<Cell> kvs = new ArrayList<>(row.getCells().size());
for (CellModel cell: row.getCells()) {
- byte[][] split = KeyValue.parseColumn(cell.getColumn());
+ byte[][] split = CellUtil.parseColumn(cell.getColumn());
byte[] column = split[0];
byte[] qualifier = null;
if (split.length == 1) {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
index b5622d03f29..3465490cce4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
@@ -132,7 +131,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
*/
public CellModel(byte[] column, byte[] qualifier, long timestamp,
byte[] value) {
- this.column = KeyValue.makeColumn(column, qualifier);
+ this.column = CellUtil.makeColumn(column, qualifier);
this.timestamp = timestamp;
this.value = value;
}
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 2b47c4f9aef..2b2e5e3700d 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -93,7 +93,7 @@ public class TestScannerResource {
throws IOException {
Random rng = new Random();
byte[] k = new byte[3];
- byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(column));
List<Put> puts = new ArrayList<>();
for (byte b1 = 'a'; b1 < 'z'; b1++) {
for (byte b2 = 'a'; b2 < 'z'; b2++) {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index 5e57e983d60..c8bbc24149e 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -295,7 +295,7 @@ public class TestScannersWithFilters {
for (CellModel cell: cells) {
assertTrue("Row mismatch",
Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx])));
- byte[][] split = KeyValue.parseColumn(cell.getColumn());
+ byte[][] split = CellUtil.parseColumn(cell.getColumn());
assertTrue("Family mismatch",
Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx])));
assertTrue("Qualifier mismatch",
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
index 2d5a0c629ed..6ac8e87cb92 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
@@ -92,7 +92,7 @@ public class TestScannersWithLabels {
private static int insertData(TableName tableName, String column, double prob) throws IOException {
byte[] k = new byte[3];
- byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
+ byte[][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(column));
List<Put> puts = new ArrayList<>(9);
for (int i = 0; i < 9; i++) {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index 826468070f2..26891774b74 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
@@ -99,7 +99,7 @@ public class TestTableResource {
htd.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
admin.createTable(htd);
byte[] k = new byte[3];
- byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(COLUMN));
+ byte [][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(COLUMN));
List<Put> puts = new ArrayList<>();
for (byte b1 = 'a'; b1 < 'z'; b1++) {
for (byte b2 = 'a'; b2 < 'z'; b2++) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 6628e742bdc..d82dd170ddb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -565,8 +565,8 @@ public class FixedFileTrailer {
} else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())
|| comparatorClassName.equals(KeyValue.META_COMPARATOR.getClass().getName())) {
comparatorKlass = MetaCellComparator.class;
- } else if (comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getClass().getName())
- || comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) {
+ } else if (comparatorClassName.equals("org.apache.hadoop.hbase.KeyValue$RawBytesComparator")
+ || comparatorClassName.equals("org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator")) {
// When the comparator to be used is Bytes.BYTES_RAWCOMPARATOR, we just return null from here
// Bytes.BYTES_RAWCOMPARATOR is not a CellComparator
comparatorKlass = null;
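Note on the literal class names above: pre-2.0 writers recorded the comparator either via
Class#getName(), which renders inner classes with a '$' separator, or via the legacy name from
getLegacyKeyComparatorName(). Illustration only (the removed class no longer compiles after
this patch):

    // Strings old trailers may contain:
    //   "org.apache.hadoop.hbase.KeyValue$RawBytesComparator"     (getClass().getName())
    //   "org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator"  (legacy key comparator name)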
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 153f36b0e4f..32cffc0887a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -326,7 +326,7 @@ public abstract class HBaseTestCase extends TestCase {
}
}
byte[][] split =
- KeyValue.parseColumn(Bytes.toBytes(sb.toString()));
+ CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
if(split.length == 1) {
byte[] qualifier = new byte[0];
put.addColumn(split[0], qualifier, t);
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index edd251db598..3f0530a3386 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -34,7 +34,7 @@ import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
import org.apache.hadoop.hbase.thrift.generated.TIncrement;
@@ -196,7 +196,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
}
private boolean internalQueueTincrement(TIncrement inc) throws TException {
- byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
+ byte[][] famAndQf = CellUtil.parseColumn(inc.getColumn());
if (famAndQf.length != 2) return false;
return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], famAndQf[1],
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index c1e761aec69..a6c59c0a623 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -923,7 +924,7 @@ public class ThriftServerRunner implements Runnable {
ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
Map<ByteBuffer, ByteBuffer> attributes)
throws IOError {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
return get(tableName, row, famAndQf[0], null, attributes);
}
@@ -968,7 +969,7 @@ public class ThriftServerRunner implements Runnable {
@Override
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if(famAndQf.length == 1) {
return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
}
@@ -1014,7 +1015,7 @@ public class ThriftServerRunner implements Runnable {
@Override
public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
}
@@ -1101,7 +1102,7 @@ public class ThriftServerRunner implements Runnable {
Get get = new Get(getBytes(row));
addAttributes(get, attributes);
for(ByteBuffer column : columns) {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
get.addFamily(famAndQf[0]);
} else {
@@ -1167,7 +1168,7 @@ public class ThriftServerRunner implements Runnable {
if (columns != null) {
for(ByteBuffer column : columns) {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
get.addFamily(famAndQf[0]);
} else {
@@ -1207,7 +1208,7 @@ public class ThriftServerRunner implements Runnable {
table = getTable(tableName);
Delete delete = new Delete(getBytes(row));
addAttributes(delete, attributes);
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
delete.addFamily(famAndQf[0], timestamp);
} else {
@@ -1320,7 +1321,7 @@ public class ThriftServerRunner implements Runnable {
// I apologize for all this mess :)
for (Mutation m : mutations) {
- byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+ byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
if (m.isDelete) {
if (famAndQf.length == 1) {
delete.addFamily(famAndQf[0], timestamp);
@@ -1379,7 +1380,7 @@ public class ThriftServerRunner implements Runnable {
Put put = new Put(row, timestamp);
addAttributes(put, attributes);
for (Mutation m : mutations) {
- byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+ byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
if (m.isDelete) {
// no qualifier, family only.
if (famAndQf.length == 1) {
@@ -1433,7 +1434,7 @@ public class ThriftServerRunner implements Runnable {
public long atomicIncrement(
ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
throws IOError, IllegalArgument, TException {
- byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
if(famAndQf.length == 1) {
return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
}
@@ -1525,7 +1526,7 @@ public class ThriftServerRunner implements Runnable {
}
if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
for(ByteBuffer column : tScan.getColumns()) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1565,7 +1566,7 @@ public class ThriftServerRunner implements Runnable {
addAttributes(scan, attributes);
if(columns != null && columns.size() != 0) {
for(ByteBuffer column : columns) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1595,7 +1596,7 @@ public class ThriftServerRunner implements Runnable {
addAttributes(scan, attributes);
if(columns != null && columns.size() != 0) {
for(ByteBuffer column : columns) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1629,7 +1630,7 @@ public class ThriftServerRunner implements Runnable {
scan.setFilter(f);
if (columns != null && columns.size() != 0) {
for(ByteBuffer column : columns) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1659,7 +1660,7 @@ public class ThriftServerRunner implements Runnable {
scan.setTimeRange(0, timestamp);
if (columns != null && columns.size() != 0) {
for (ByteBuffer column : columns) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1690,7 +1691,7 @@ public class ThriftServerRunner implements Runnable {
scan.setTimeRange(0, timestamp);
if (columns != null && columns.size() != 0) {
for (ByteBuffer column : columns) {
- byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+ byte [][] famQf = CellUtil.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
@@ -1868,7 +1869,7 @@ public class ThriftServerRunner implements Runnable {
put = new Put(getBytes(row), HConstants.LATEST_TIMESTAMP);
addAttributes(put, attributes);
- byte[][] famAndQf = KeyValue.parseColumn(getBytes(mput.column));
+ byte[][] famAndQf = CellUtil.parseColumn(getBytes(mput.column));
put.addImmutable(famAndQf[0], famAndQf[1], mput.value != null ? getBytes(mput.value)
: HConstants.EMPTY_BYTE_ARRAY);
@@ -1882,7 +1883,7 @@ public class ThriftServerRunner implements Runnable {
Table table = null;
try {
table = getTable(tableName);
- byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+ byte[][] famAndQf = CellUtil.parseColumn(getBytes(column));
return table.checkAndPut(getBytes(row), famAndQf[0], famAndQf[1],
value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put);
} catch (IOException e) {
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 9510040fd92..1ff91ee70a0 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -67,7 +67,7 @@ public class ThriftUtilities {
if (in.name == null || !in.name.hasRemaining()) {
throw new IllegalArgument("column name is empty");
}
- byte [] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
+ byte [] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
HColumnDescriptor col = new HColumnDescriptor(parsedName)
.setMaxVersions(in.maxVersions)
.setCompressionType(comp)
@@ -160,7 +160,7 @@ public class ThriftUtilities {
result.sortedColumns = new ArrayList<>();
for (Cell kv : result_.rawCells()) {
result.sortedColumns.add(new TColumn(
- ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
+ ByteBuffer.wrap(CellUtil.makeColumn(CellUtil.cloneFamily(kv),
CellUtil.cloneQualifier(kv))),
new TCell(ByteBuffer.wrap(CellUtil.cloneValue(kv)), kv.getTimestamp())));
}
@@ -168,7 +168,7 @@ public class ThriftUtilities {
result.columns = new TreeMap<>();
for (Cell kv : result_.rawCells()) {
result.columns.put(
- ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
+ ByteBuffer.wrap(CellUtil.makeColumn(CellUtil.cloneFamily(kv),
CellUtil.cloneQualifier(kv))),
new TCell(ByteBuffer.wrap(CellUtil.cloneValue(kv)), kv.getTimestamp()));
}
@@ -203,7 +203,7 @@ public class ThriftUtilities {
*/
public static Increment incrementFromThrift(TIncrement tincrement) {
Increment inc = new Increment(tincrement.getRow());
- byte[][] famAndQf = KeyValue.parseColumn(tincrement.getColumn());
+ byte[][] famAndQf = CellUtil.parseColumn(tincrement.getColumn());
if (famAndQf.length != 2) return null;
inc.addColumn(famAndQf[0], famAndQf[1], tincrement.getAmmount());
return inc;
@@ -227,7 +227,7 @@ public class ThriftUtilities {
int length = columns.size();
for (int i = 0; i < length; i++) {
- byte[][] famAndQf = KeyValue.parseColumn(getBytes(columns.get(i)));
+ byte[][] famAndQf = CellUtil.parseColumn(getBytes(columns.get(i)));
append.addColumn(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
}
return append;