HBASE-13844 Move static helper methods from KeyValue into CellUtils

Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
Author: Andy Yang
Date: 2017-09-26 05:19:31 -07:00
Committer: Chia-Ping Tsai
Parent: b5b4108fce
Commit: afce850cfd

22 changed files with 107 additions and 337 deletions


@@ -487,14 +487,5 @@ public class RegionInfoBuilder {
     return RegionInfo.COMPARATOR.compare(this, other);
   }
-
-  /**
-   * @return Comparator to use comparing {@link KeyValue}s.
-   * @deprecated Use Region#getCellComparator(). deprecated for hbase 2.0, remove for hbase 3.0
-   */
-  @Deprecated
-  public KeyValue.KVComparator getComparator() {
-    return isMetaRegion()?
-      KeyValue.META_COMPARATOR: KeyValue.COMPARATOR;
-  }
   }
 }


@@ -20,6 +20,9 @@ package org.apache.hadoop.hbase;
 import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIMITER;
+import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
+import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIM_ARRAY;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -126,6 +129,51 @@ public final class CellUtil {
     return output;
   }
 
+  /**
+   * Makes a column in family:qualifier form from separate byte arrays.
+   * <p>
+   * Not recommended for usage as this is old-style API.
+   * @param family
+   * @param qualifier
+   * @return family:qualifier
+   */
+  public static byte [] makeColumn(byte [] family, byte [] qualifier) {
+    return Bytes.add(family, COLUMN_FAMILY_DELIM_ARRAY, qualifier);
+  }
+
+  /**
+   * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
+   * (ie, {@code fam:}) is parsed as <code>{ fam, EMPTY_BYTE_ARRAY }</code> while no delimiter (ie,
+   * {@code fam}) is parsed as an array of one element, <code>{ fam }</code>.
+   * <p>
+   * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
+   * </p>
+   * <p>
+   * Not recommend to be used as this is old-style API.
+   * </p>
+   * @param c The column.
+   * @return The parsed column.
+   */
+  public static byte [][] parseColumn(byte [] c) {
+    final int index = getDelimiter(c, 0, c.length, COLUMN_FAMILY_DELIMITER);
+    if (index == -1) {
+      // If no delimiter, return array of size 1
+      return new byte [][] { c };
+    } else if (index == c.length - 1) {
+      // family with empty qualifier, return array size 2
+      byte [] family = new byte[c.length - 1];
+      System.arraycopy(c, 0, family, 0, family.length);
+      return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY };
+    }
+    // Family and column, return array size 2
+    final byte [][] result = new byte [2][];
+    result[0] = new byte [index];
+    System.arraycopy(c, 0, result[0], 0, index);
+    final int len = c.length - (index + 1);
+    result[1] = new byte[len];
+    System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
+    return result;
+  }
+
 /******************** copyTo **********************************/
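
For reference, a minimal usage sketch of the two relocated helpers; the family
and qualifier values here are illustrative, not part of the patch:

    byte[] col = CellUtil.makeColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
    // col now holds the bytes of "info:name"
    byte[][] both      = CellUtil.parseColumn(col);                     // { info, name }
    byte[][] famOnly   = CellUtil.parseColumn(Bytes.toBytes("info"));   // { info }
    byte[][] emptyQual = CellUtil.parseColumn(Bytes.toBytes("info:"));  // { info, EMPTY_BYTE_ARRAY }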


@@ -23,9 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.len;
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.EOFException;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -41,11 +39,9 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
  * An HBase Key/Value. This is the fundamental HBase Type.
  * <p>
@@ -101,25 +97,18 @@ public class KeyValue implements ExtendedCell {
   /**
    * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
    * of KeyValue only.
-   * @deprecated Use {@link CellComparator#COMPARATOR} instead
+   * @deprecated Use {@link CellComparator#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
    */
   @Deprecated
   public static final KVComparator COMPARATOR = new KVComparator();
 
   /**
    * A {@link KVComparator} for <code>hbase:meta</code> catalog table
    * {@link KeyValue}s.
-   * @deprecated Use {@link CellComparator#META_COMPARATOR} instead
+   * @deprecated Use {@link CellComparator#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
    */
   @Deprecated
   public static final KVComparator META_COMPARATOR = new MetaComparator();
 
-  /**
-   * Needed for Bloom Filters.
-   * @deprecated Use {@link Bytes#BYTES_RAWCOMPARATOR} instead
-   */
-  @Deprecated
-  public static final KVComparator RAW_COMPARATOR = new RawBytesComparator();
-
   /** Size of the key length field in bytes*/
   public static final int KEY_LENGTH_SIZE = Bytes.SIZEOF_INT;
@@ -291,15 +280,6 @@ public class KeyValue implements ExtendedCell {
   protected int offset = 0; // offset into bytes buffer KV starts at
   protected int length = 0; // length of the KV starting from offset.
 
-  /**
-   * @return True if a delete type, a {@link KeyValue.Type#Delete} or
-   * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
-   * KeyValue type.
-   */
-  public static boolean isDelete(byte t) {
-    return Type.Delete.getCode() <= t && t <= Type.DeleteFamily.getCode();
-  }
-
   /** Here be dragons **/
 
   /**
@@ -1508,14 +1488,6 @@ public class KeyValue implements ExtendedCell {
     return Bytes.toLong(this.bytes, tsOffset);
   }
 
-  /**
-   * @return Type of this KeyValue.
-   */
-  @Deprecated
-  public byte getType() {
-    return getTypeByte();
-  }
-
   /**
    * @return KeyValue.TYPE byte representation
    */
@@ -1524,16 +1496,6 @@ public class KeyValue implements ExtendedCell {
     return this.bytes[this.offset + getKeyLength() - 1 + ROW_OFFSET];
   }
 
-  /**
-   * @return True if a delete type, a {@link KeyValue.Type#Delete} or
-   * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
-   * KeyValue type.
-   */
-  @Deprecated // use CellUtil#isDelete
-  public boolean isDelete() {
-    return KeyValue.isDelete(getType());
-  }
-
   /**
    * This returns the offset where the tag actually starts.
    */
@@ -1601,52 +1563,6 @@ public class KeyValue implements ExtendedCell {
     return new KeyValue(newBuffer);
   }
 
-  /**
-   * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
-   * (ie, {@code fam:}) is parsed as <code>{ fam, EMPTY_BYTE_ARRAY }</code> while no delimiter (ie,
-   * {@code fam}) is parsed as an array of one element, <code>{ fam }</code>.
-   * <p>
-   * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
-   * </p>
-   * <p>
-   * Not recommend to be used as this is old-style API.
-   * </p>
-   * @param c The column.
-   * @return The parsed column.
-   */
-  public static byte [][] parseColumn(byte [] c) {
-    final int index = getDelimiter(c, 0, c.length, COLUMN_FAMILY_DELIMITER);
-    if (index == -1) {
-      // If no delimiter, return array of size 1
-      return new byte [][] { c };
-    } else if (index == c.length - 1) {
-      // family with empty qualifier, return array size 2
-      byte [] family = new byte[c.length - 1];
-      System.arraycopy(c, 0, family, 0, family.length);
-      return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY };
-    }
-    // Family and column, return array size 2
-    final byte [][] result = new byte [2][];
-    result[0] = new byte [index];
-    System.arraycopy(c, 0, result[0], 0, index);
-    final int len = c.length - (index + 1);
-    result[1] = new byte[len];
-    System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
-    return result;
-  }
-
-  /**
-   * Makes a column in family:qualifier form from separate byte arrays.
-   * <p>
-   * Not recommended for usage as this is old-style API.
-   * @param family
-   * @param qualifier
-   * @return family:qualifier
-   */
-  public static byte [] makeColumn(byte [] family, byte [] qualifier) {
-    return Bytes.add(family, COLUMN_FAMILY_DELIM_ARRAY, qualifier);
-  }
-
   /**
    * @param b
    * @param delimiter
@@ -1692,7 +1608,7 @@ public class KeyValue implements ExtendedCell {
   /**
    * A {@link KVComparator} for <code>hbase:meta</code> catalog table
    * {@link KeyValue}s.
-   * @deprecated : {@link CellComparator#META_COMPARATOR} to be used
+   * @deprecated : {@link CellComparator#META_COMPARATOR} to be used. Deprecated for hbase 2.0, remove for hbase 3.0.
    */
   @Deprecated
   public static class MetaComparator extends KVComparator {
@@ -1807,7 +1723,7 @@ public class KeyValue implements ExtendedCell {
    * Compare KeyValues. When we compare KeyValues, we only compare the Key
    * portion. This means two KeyValues with same Key but different Values are
    * considered the same as far as this Comparator is concerned.
-   * @deprecated : Use {@link CellComparator}.
+   * @deprecated : Use {@link CellComparator}. Deprecated for hbase 2.0, remove for hbase 3.0.
    */
   @Deprecated
   public static class KVComparator implements RawComparator<Cell>, SamePrefixComparator<byte[]> {
@@ -2322,40 +2238,6 @@ public class KeyValue implements ExtendedCell {
   }
 
-  /**
-   * @param b
-   * @return A KeyValue made of a byte array that holds the key-only part.
-   * Needed to convert hfile index members to KeyValues.
-   */
-  public static KeyValue createKeyValueFromKey(final byte [] b) {
-    return createKeyValueFromKey(b, 0, b.length);
-  }
-
-  /**
-   * @param bb
-   * @return A KeyValue made of a byte buffer that holds the key-only part.
-   * Needed to convert hfile index members to KeyValues.
-   */
-  public static KeyValue createKeyValueFromKey(final ByteBuffer bb) {
-    return createKeyValueFromKey(bb.array(), bb.arrayOffset(), bb.limit());
-  }
-
-  /**
-   * @param b
-   * @param o
-   * @param l
-   * @return A KeyValue made of a byte array that holds the key-only part.
-   * Needed to convert hfile index members to KeyValues.
-   */
-  public static KeyValue createKeyValueFromKey(final byte [] b, final int o,
-      final int l) {
-    byte [] newb = new byte[l + ROW_OFFSET];
-    System.arraycopy(b, o, newb, ROW_OFFSET, l);
-    Bytes.putInt(newb, 0, l);
-    Bytes.putInt(newb, Bytes.SIZEOF_INT, 0);
-    return new KeyValue(newb);
-  }
-
   /**
    * @param in Where to read bytes from. Creates a byte array to hold the KeyValue
    * backing bytes copied from the steam.
@@ -2388,55 +2270,6 @@ public class KeyValue implements ExtendedCell {
     return new KeyValue(bytes, 0, length);
   }
 
-  /**
-   * Create a new KeyValue by copying existing cell and adding new tags
-   * @param c
-   * @param newTags
-   * @return a new KeyValue instance with new tags
-   */
-  public static KeyValue cloneAndAddTags(Cell c, List<Tag> newTags) {
-    List<Tag> existingTags = null;
-    if (c.getTagsLength() > 0) {
-      existingTags = CellUtil.getTags(c);
-      existingTags.addAll(newTags);
-    } else {
-      existingTags = newTags;
-    }
-    return new KeyValue(c.getRowArray(), c.getRowOffset(), (int) c.getRowLength(),
-      c.getFamilyArray(), c.getFamilyOffset(), (int) c.getFamilyLength(),
-      c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(),
-      c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(),
-      c.getValueLength(), existingTags);
-  }
-
-  /**
-   * Create a KeyValue reading from the raw InputStream.
-   * Named <code>iscreate</code> so doesn't clash with {@link #create(DataInput)}
-   * @param in
-   * @return Created KeyValue or throws an exception
-   * @throws IOException
-   * {@link Deprecated} As of 1.2. Use {@link KeyValueUtil#iscreate(InputStream, boolean)} instead.
-   */
-  @Deprecated
-  public static KeyValue iscreate(final InputStream in) throws IOException {
-    byte [] intBytes = new byte[Bytes.SIZEOF_INT];
-    int bytesRead = 0;
-    while (bytesRead < intBytes.length) {
-      int n = in.read(intBytes, bytesRead, intBytes.length - bytesRead);
-      if (n < 0) {
-        if (bytesRead == 0) {
-          throw new EOFException();
-        }
-        throw new IOException("Failed read of int, read " + bytesRead + " bytes");
-      }
-      bytesRead += n;
-    }
-    // TODO: perhaps some sanity check is needed here.
-    byte [] bytes = new byte[Bytes.toInt(intBytes)];
-    IOUtils.readFully(in, bytes, 0, bytes.length);
-    return new KeyValue(bytes, 0, bytes.length);
-  }
-
   /**
    * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable.
    * @param kv
@@ -2496,23 +2329,6 @@ public class KeyValue implements ExtendedCell {
     ByteBufferUtils.copyFromArrayToBuffer(buf, offset, this.bytes, this.offset, this.length);
   }
 
-  /**
-   * Comparator that compares row component only of a KeyValue.
-   */
-  public static class RowOnlyComparator implements Comparator<KeyValue> {
-    final KVComparator comparator;
-
-    public RowOnlyComparator(final KVComparator c) {
-      this.comparator = c;
-    }
-
-    @Override
-    public int compare(KeyValue left, KeyValue right) {
-      return comparator.compareRows(left, right);
-    }
-  }
-
   /**
    * Avoids redundant comparisons for better performance.
    *
@@ -2528,71 +2344,6 @@ public class KeyValue implements ExtendedCell {
     );
   }
 
-  /**
-   * @deprecated Not to be used for any comparsions
-   */
-  @Deprecated
-  public static class RawBytesComparator extends KVComparator {
-    /**
-     * The HFileV2 file format's trailer contains this class name. We reinterpret this and
-     * instantiate the appropriate comparator.
-     * TODO: With V3 consider removing this.
-     * @return legacy class name for FileFileTrailer#comparatorClassName
-     */
-    @Override
-    public String getLegacyKeyComparatorName() {
-      return "org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator";
-    }
-
-    /**
-     * @deprecated Since 0.99.2.
-     */
-    @Override
-    @Deprecated
-    public int compareFlatKey(byte[] left, int loffset, int llength, byte[] right,
-        int roffset, int rlength) {
-      return Bytes.BYTES_RAWCOMPARATOR.compare(left, loffset, llength, right, roffset, rlength);
-    }
-
-    @Override
-    public int compare(Cell left, Cell right) {
-      return compareOnlyKeyPortion(left, right);
-    }
-
-    @Override
-    @VisibleForTesting
-    public int compareOnlyKeyPortion(Cell left, Cell right) {
-      int c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getRowArray(), left.getRowOffset(),
-          left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
-      if (c != 0) {
-        return c;
-      }
-      c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getFamilyArray(), left.getFamilyOffset(),
-          left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
-          right.getFamilyLength());
-      if (c != 0) {
-        return c;
-      }
-      c = Bytes.BYTES_RAWCOMPARATOR.compare(left.getQualifierArray(), left.getQualifierOffset(),
-          left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
-          right.getQualifierLength());
-      if (c != 0) {
-        return c;
-      }
-      c = compareTimestamps(left.getTimestamp(), right.getTimestamp());
-      if (c != 0) {
-        return c;
-      }
-      return (0xff & left.getTypeByte()) - (0xff & right.getTypeByte());
-    }
-
-    @Override
-    public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
-      return firstKeyInBlock;
-    }
-  }
-
   /**
    * HeapSize implementation
    *
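
For code that was calling the helpers removed above, a hedged migration sketch;
the replacement names come from the deprecation notes in this commit, and the
cell/left/right variables are illustrative (double-check exact signatures
against the 2.0 API):

    // Column helpers moved to CellUtil (see the CellUtil hunk above).
    byte[][] fq  = CellUtil.parseColumn(Bytes.toBytes("fam:qual"));
    byte[]   col = CellUtil.makeColumn(fq[0], fq[1]);

    // KVComparator users switch to CellComparator.
    int c = CellComparator.COMPARATOR.compare(left, right);       // plain tables
    int m = CellComparator.META_COMPARATOR.compare(left, right);  // hbase:meta

    // Delete checks move to CellUtil, per the @Deprecated note on isDelete().
    boolean deleted = CellUtil.isDelete(cell);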


@@ -26,7 +26,6 @@ import java.util.concurrent.CopyOnWriteArraySet;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 
 /**
  * Immutable POJO class for representing a table name.
@@ -536,19 +535,4 @@ public final class TableName implements Comparable<TableName> {
     return this.nameAsString.compareTo(tableName.getNameAsString());
   }
 
-  /**
-   * Get the appropriate row comparator for this table.
-   *
-   * @return The comparator.
-   * @deprecated The comparator is an internal property of the table. Should
-   * not have been exposed here
-   */
-  @InterfaceAudience.Private
-  @Deprecated
-  public KVComparator getRowComparator() {
-    if (TableName.META_TABLE_NAME.equals(this)) {
-      return KeyValue.META_COMPARATOR;
-    }
-    return KeyValue.COMPARATOR;
-  }
 }
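
Callers of the removed TableName#getRowComparator() can select the comparator
directly; a minimal sketch, assuming the CellComparator constants referenced
elsewhere in this commit (tableName is the caller's variable):

    CellComparator comparator = TableName.META_TABLE_NAME.equals(tableName)
        ? CellComparator.META_COMPARATOR
        : CellComparator.COMPARATOR;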


@@ -24,7 +24,6 @@ import java.util.ArrayList;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -118,7 +117,7 @@ implements TableMap<ImmutableBytesWritable,Result> {
     int numCols = columns.length;
     if (numCols > 0) {
       for (Cell value: r.listCells()) {
-        byte [] column = KeyValue.makeColumn(CellUtil.cloneFamily(value),
+        byte [] column = CellUtil.makeColumn(CellUtil.cloneFamily(value),
             CellUtil.cloneQualifier(value));
         for (int i = 0; i < numCols; i++) {
           if (Bytes.equals(column, columns[i])) {


@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -108,7 +107,7 @@ extends TableMapper<ImmutableBytesWritable,Result> implements Configurable {
     int numCols = columns.length;
     if (numCols > 0) {
       for (Cell value: r.listCells()) {
-        byte [] column = KeyValue.makeColumn(CellUtil.cloneFamily(value),
+        byte [] column = CellUtil.makeColumn(CellUtil.cloneFamily(value),
            CellUtil.cloneQualifier(value));
        for (int i = 0; i < numCols; i++) {
          if (Bytes.equals(column, columns[i])) {


@@ -27,7 +27,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
@@ -213,7 +213,7 @@ implements Configurable {
    * @throws IllegalArgumentException When familyAndQualifier is invalid.
    */
   private static void addColumn(Scan scan, byte[] familyAndQualifier) {
-    byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
+    byte [][] fq = CellUtil.parseColumn(familyAndQualifier);
     if (fq.length == 1) {
       scan.addFamily(fq[0]);
     } else if (fq.length == 2) {


@@ -43,7 +43,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Increment;
@@ -233,7 +232,7 @@ public class RowResource extends ResourceBase {
           .type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
           .build();
       }
-      byte [][] parts = KeyValue.parseColumn(col);
+      byte [][] parts = CellUtil.parseColumn(col);
       if (parts.length != 2) {
         return Response.status(Response.Status.BAD_REQUEST)
           .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -301,7 +300,7 @@ public class RowResource extends ResourceBase {
           .build();
       }
       Put put = new Put(row);
-      byte parts[][] = KeyValue.parseColumn(column);
+      byte parts[][] = CellUtil.parseColumn(column);
       if (parts.length != 2) {
         return Response.status(Response.Status.BAD_REQUEST)
           .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -390,7 +389,7 @@ public class RowResource extends ResourceBase {
       delete = new Delete(rowspec.getRow());
       for (byte[] column: rowspec.getColumns()) {
-        byte[][] split = KeyValue.parseColumn(column);
+        byte[][] split = CellUtil.parseColumn(column);
         if (rowspec.hasTimestamp()) {
           if (split.length == 1) {
             delete.addFamily(split[0], rowspec.getTimestamp());
@@ -473,7 +472,7 @@ public class RowResource extends ResourceBase {
     boolean retValue;
     CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
     byte[] valueToCheckColumn = valueToCheckCell.getColumn();
-    byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
+    byte[][] valueToPutParts = CellUtil.parseColumn(valueToCheckColumn);
     if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
       CellModel valueToPutCell = null;
@@ -490,7 +489,7 @@ public class RowResource extends ResourceBase {
           .build();
       }
-      byte [][] parts = KeyValue.parseColumn(col);
+      byte [][] parts = CellUtil.parseColumn(col);
       if (parts.length != 2) {
         return Response.status(Response.Status.BAD_REQUEST)
@@ -606,7 +605,7 @@ public class RowResource extends ResourceBase {
           .build();
       }
-      parts = KeyValue.parseColumn(col);
+      parts = CellUtil.parseColumn(col);
       if (parts.length == 1) {
         // Only Column Family is specified
@@ -623,7 +622,7 @@ public class RowResource extends ResourceBase {
         }
       }
-      parts = KeyValue.parseColumn(valueToDeleteColumn);
+      parts = CellUtil.parseColumn(valueToDeleteColumn);
       if (parts.length == 2) {
         if (parts[1].length != 0) {
           // To support backcompat of deleting a cell
@@ -722,7 +721,7 @@ public class RowResource extends ResourceBase {
           .type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
           .build();
       }
-      byte [][] parts = KeyValue.parseColumn(col);
+      byte [][] parts = CellUtil.parseColumn(col);
       if (parts.length != 2) {
         servlet.getMetrics().incrementFailedAppendRequests(1);
         return Response.status(Response.Status.BAD_REQUEST)
@@ -816,7 +815,7 @@ public class RowResource extends ResourceBase {
           .type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
           .build();
       }
-      byte [][] parts = KeyValue.parseColumn(col);
+      byte [][] parts = CellUtil.parseColumn(col);
       if (parts.length != 2) {
         servlet.getMetrics().incrementFailedIncrementRequests(1);
         return Response.status(Response.Status.BAD_REQUEST)


@@ -25,10 +25,10 @@ import java.util.NoSuchElementException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
@@ -50,7 +50,7 @@ public class RowResultGenerator extends ResultGenerator {
       Get get = new Get(rowspec.getRow());
       if (rowspec.hasColumns()) {
         for (byte[] col: rowspec.getColumns()) {
-          byte[][] split = KeyValue.parseColumn(col);
+          byte[][] split = CellUtil.parseColumn(col);
           if (split.length == 1) {
             get.addFamily(split[0]);
           } else if (split.length == 2) {


@@ -35,7 +35,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -175,7 +174,7 @@ public class ScannerInstanceResource extends ResourceBase {
       response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
       response.header("X-Column",
         Base64.encodeBytes(
-          KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
+          CellUtil.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
       response.header("X-Timestamp", value.getTimestamp());
       servlet.getMetrics().incrementSucessfulGetRequests(1);
       return response.build();


@@ -25,7 +25,7 @@ import java.util.Iterator;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownScannerException;
@@ -80,7 +80,7 @@ public class ScannerResultGenerator extends ResultGenerator {
       if (rowspec.hasColumns()) {
         byte[][] columns = rowspec.getColumns();
         for (byte[] column: columns) {
-          byte[][] split = KeyValue.parseColumn(column);
+          byte[][] split = CellUtil.parseColumn(column);
           if (split.length == 1) {
             scan.addFamily(split[0]);
           } else if (split.length == 2) {


@@ -177,7 +177,7 @@ public class RemoteHTable implements Table {
     for (RowModel row: model.getRows()) {
       List<Cell> kvs = new ArrayList<>(row.getCells().size());
       for (CellModel cell: row.getCells()) {
-        byte[][] split = KeyValue.parseColumn(cell.getColumn());
+        byte[][] split = CellUtil.parseColumn(cell.getColumn());
         byte[] column = split[0];
         byte[] qualifier = null;
         if (split.length == 1) {


@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
@@ -132,7 +131,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
    */
   public CellModel(byte[] column, byte[] qualifier, long timestamp,
       byte[] value) {
-    this.column = KeyValue.makeColumn(column, qualifier);
+    this.column = CellUtil.makeColumn(column, qualifier);
     this.timestamp = timestamp;
     this.value = value;
   }


@@ -44,7 +44,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
@@ -93,7 +93,7 @@ public class TestScannerResource {
       throws IOException {
     Random rng = new Random();
     byte[] k = new byte[3];
-    byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
+    byte [][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(column));
     List<Put> puts = new ArrayList<>();
     for (byte b1 = 'a'; b1 < 'z'; b1++) {
       for (byte b2 = 'a'; b2 < 'z'; b2++) {


@@ -295,7 +295,7 @@ public class TestScannersWithFilters {
       for (CellModel cell: cells) {
         assertTrue("Row mismatch",
             Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx])));
-        byte[][] split = KeyValue.parseColumn(cell.getColumn());
+        byte[][] split = CellUtil.parseColumn(cell.getColumn());
         assertTrue("Family mismatch",
             Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx])));
         assertTrue("Qualifier mismatch",


@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
@@ -92,7 +92,7 @@ public class TestScannersWithLabels {
   private static int insertData(TableName tableName, String column, double prob) throws IOException {
     byte[] k = new byte[3];
-    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
+    byte[][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(column));
     List<Put> puts = new ArrayList<>(9);
     for (int i = 0; i < 9; i++) {


@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -99,7 +99,7 @@ public class TestTableResource {
     htd.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
     admin.createTable(htd);
     byte[] k = new byte[3];
-    byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(COLUMN));
+    byte [][] famAndQf = CellUtil.parseColumn(Bytes.toBytes(COLUMN));
     List<Put> puts = new ArrayList<>();
     for (byte b1 = 'a'; b1 < 'z'; b1++) {
       for (byte b2 = 'a'; b2 < 'z'; b2++) {


@@ -565,8 +565,8 @@ public class FixedFileTrailer {
     } else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())
         || comparatorClassName.equals(KeyValue.META_COMPARATOR.getClass().getName())) {
       comparatorKlass = MetaCellComparator.class;
-    } else if (comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getClass().getName())
-        || comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) {
+    } else if (comparatorClassName.equals("org.apache.hadoop.hbase.KeyValue$RawBytesComparator")
+        || comparatorClassName.equals("org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator")) {
       // When the comparator to be used is Bytes.BYTES_RAWCOMPARATOR, we just return null from here
       // Bytes.BYTES_RAWCOMPARATOR is not a CellComparator
       comparatorKlass = null;
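
Since RAW_COMPARATOR no longer exists, the branch above matches the two legacy
trailer names as literal strings. A hypothetical helper expressing the same
mapping (the method name and its placement are illustrative, not part of the
patch):

    // Legacy raw-bytes comparator names in an HFile trailer mean "no CellComparator";
    // the caller then leaves comparatorKlass null, per the comment in the hunk.
    static boolean isLegacyRawBytesName(String comparatorClassName) {
      return "org.apache.hadoop.hbase.KeyValue$RawBytesComparator".equals(comparatorClassName)
          || "org.apache.hadoop.hbase.util.Bytes$ByteArrayComparator".equals(comparatorClassName);
    }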


@@ -326,7 +326,7 @@ public abstract class HBaseTestCase extends TestCase {
         }
       }
       byte[][] split =
-          KeyValue.parseColumn(Bytes.toBytes(sb.toString()));
+          CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
       if (split.length == 1) {
         byte[] qualifier = new byte[0];
         put.addColumn(split[0], qualifier, t);


@@ -34,7 +34,7 @@ import java.util.concurrent.atomic.LongAdder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
 import org.apache.hadoop.hbase.thrift.generated.TIncrement;
@@ -196,7 +196,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   }
 
   private boolean internalQueueTincrement(TIncrement inc) throws TException {
-    byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
+    byte[][] famAndQf = CellUtil.parseColumn(inc.getColumn());
     if (famAndQf.length != 2) return false;
 
     return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], famAndQf[1],
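
The Thrift handler hunks that follow all repeat one guard pattern after the
switch to CellUtil.parseColumn; condensed here for reference (getBytes is the
handler's existing ByteBuffer helper, and the Get target is illustrative):

    byte[][] famAndQf = CellUtil.parseColumn(getBytes(column));
    if (famAndQf.length == 1) {
      get.addFamily(famAndQf[0]);               // "cf" -> whole family
    } else {
      get.addColumn(famAndQf[0], famAndQf[1]);  // "cf:q" -> single column
    }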


@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -923,7 +924,7 @@ public class ThriftServerRunner implements Runnable {
         ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
       if (famAndQf.length == 1) {
         return get(tableName, row, famAndQf[0], null, attributes);
       }
@@ -968,7 +969,7 @@ public class ThriftServerRunner implements Runnable {
     @Override
     public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
         int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
       if (famAndQf.length == 1) {
         return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
       }
@@ -1014,7 +1015,7 @@ public class ThriftServerRunner implements Runnable {
     @Override
     public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
         long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
       if (famAndQf.length == 1) {
         return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
       }
@@ -1101,7 +1102,7 @@ public class ThriftServerRunner implements Runnable {
       Get get = new Get(getBytes(row));
       addAttributes(get, attributes);
       for (ByteBuffer column : columns) {
-        byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+        byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
         if (famAndQf.length == 1) {
           get.addFamily(famAndQf[0]);
         } else {
@@ -1167,7 +1168,7 @@ public class ThriftServerRunner implements Runnable {
       if (columns != null) {
         for (ByteBuffer column : columns) {
-          byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
           if (famAndQf.length == 1) {
             get.addFamily(famAndQf[0]);
           } else {
@@ -1207,7 +1208,7 @@ public class ThriftServerRunner implements Runnable {
         table = getTable(tableName);
         Delete delete = new Delete(getBytes(row));
         addAttributes(delete, attributes);
-        byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+        byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
         if (famAndQf.length == 1) {
           delete.addFamily(famAndQf[0], timestamp);
         } else {
@@ -1320,7 +1321,7 @@ public class ThriftServerRunner implements Runnable {
       // I apologize for all this mess :)
       for (Mutation m : mutations) {
-        byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+        byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
         if (m.isDelete) {
           if (famAndQf.length == 1) {
             delete.addFamily(famAndQf[0], timestamp);
@@ -1379,7 +1380,7 @@ public class ThriftServerRunner implements Runnable {
         Put put = new Put(row, timestamp);
         addAttributes(put, attributes);
         for (Mutation m : mutations) {
-          byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+          byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
           if (m.isDelete) {
             // no qualifier, family only.
             if (famAndQf.length == 1) {
@@ -1433,7 +1434,7 @@ public class ThriftServerRunner implements Runnable {
     public long atomicIncrement(
         ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
         throws IOError, IllegalArgument, TException {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      byte [][] famAndQf = CellUtil.parseColumn(getBytes(column));
       if (famAndQf.length == 1) {
         return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
       }
@@ -1525,7 +1526,7 @@ public class ThriftServerRunner implements Runnable {
       }
       if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
         for (ByteBuffer column : tScan.getColumns()) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1565,7 +1566,7 @@ public class ThriftServerRunner implements Runnable {
       addAttributes(scan, attributes);
       if (columns != null && columns.size() != 0) {
         for (ByteBuffer column : columns) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1595,7 +1596,7 @@ public class ThriftServerRunner implements Runnable {
       addAttributes(scan, attributes);
       if (columns != null && columns.size() != 0) {
         for (ByteBuffer column : columns) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1629,7 +1630,7 @@ public class ThriftServerRunner implements Runnable {
       scan.setFilter(f);
       if (columns != null && columns.size() != 0) {
         for (ByteBuffer column : columns) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1659,7 +1660,7 @@ public class ThriftServerRunner implements Runnable {
       scan.setTimeRange(0, timestamp);
       if (columns != null && columns.size() != 0) {
         for (ByteBuffer column : columns) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1690,7 +1691,7 @@ public class ThriftServerRunner implements Runnable {
       scan.setTimeRange(0, timestamp);
       if (columns != null && columns.size() != 0) {
         for (ByteBuffer column : columns) {
-          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          byte [][] famQf = CellUtil.parseColumn(getBytes(column));
           if (famQf.length == 1) {
             scan.addFamily(famQf[0]);
           } else {
@@ -1868,7 +1869,7 @@ public class ThriftServerRunner implements Runnable {
         put = new Put(getBytes(row), HConstants.LATEST_TIMESTAMP);
         addAttributes(put, attributes);
-        byte[][] famAndQf = KeyValue.parseColumn(getBytes(mput.column));
+        byte[][] famAndQf = CellUtil.parseColumn(getBytes(mput.column));
         put.addImmutable(famAndQf[0], famAndQf[1], mput.value != null ? getBytes(mput.value)
             : HConstants.EMPTY_BYTE_ARRAY);
@@ -1882,7 +1883,7 @@ public class ThriftServerRunner implements Runnable {
       Table table = null;
       try {
         table = getTable(tableName);
-        byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+        byte[][] famAndQf = CellUtil.parseColumn(getBytes(column));
         return table.checkAndPut(getBytes(row), famAndQf[0], famAndQf[1],
             value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put);
       } catch (IOException e) {


@@ -67,7 +67,7 @@ public class ThriftUtilities {
     if (in.name == null || !in.name.hasRemaining()) {
       throw new IllegalArgument("column name is empty");
     }
-    byte [] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
+    byte [] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
     HColumnDescriptor col = new HColumnDescriptor(parsedName)
         .setMaxVersions(in.maxVersions)
         .setCompressionType(comp)
@@ -160,7 +160,7 @@ public class ThriftUtilities {
       result.sortedColumns = new ArrayList<>();
       for (Cell kv : result_.rawCells()) {
         result.sortedColumns.add(new TColumn(
-            ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
+            ByteBuffer.wrap(CellUtil.makeColumn(CellUtil.cloneFamily(kv),
                 CellUtil.cloneQualifier(kv))),
             new TCell(ByteBuffer.wrap(CellUtil.cloneValue(kv)), kv.getTimestamp())));
       }
@@ -168,7 +168,7 @@ public class ThriftUtilities {
       result.columns = new TreeMap<>();
       for (Cell kv : result_.rawCells()) {
         result.columns.put(
-            ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
+            ByteBuffer.wrap(CellUtil.makeColumn(CellUtil.cloneFamily(kv),
                 CellUtil.cloneQualifier(kv))),
             new TCell(ByteBuffer.wrap(CellUtil.cloneValue(kv)), kv.getTimestamp()));
       }
@@ -203,7 +203,7 @@ public class ThriftUtilities {
    */
   public static Increment incrementFromThrift(TIncrement tincrement) {
     Increment inc = new Increment(tincrement.getRow());
-    byte[][] famAndQf = KeyValue.parseColumn(tincrement.getColumn());
+    byte[][] famAndQf = CellUtil.parseColumn(tincrement.getColumn());
     if (famAndQf.length != 2) return null;
     inc.addColumn(famAndQf[0], famAndQf[1], tincrement.getAmmount());
     return inc;
@@ -227,7 +227,7 @@ public class ThriftUtilities {
     int length = columns.size();
     for (int i = 0; i < length; i++) {
-      byte[][] famAndQf = KeyValue.parseColumn(getBytes(columns.get(i)));
+      byte[][] famAndQf = CellUtil.parseColumn(getBytes(columns.get(i)));
       append.addColumn(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
     }
     return append;