HBASE-2274 [stargate] filter support: JSON descriptors
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@919066 13f79535-47bb-0310-9956-ffa450edef68
parent a4b99cf687
commit baef12d73e
CHANGES.txt
@@ -427,6 +427,7 @@ Release 0.21.0 - Unreleased
    HBASE-2257  [stargate] multiuser mode
    HBASE-2263  [stargate] multiuser mode: authenticator for zookeeper
    HBASE-2273  [stargate] export metrics via Hadoop metrics, JMX, and zookeeper
+   HBASE-2274  [stargate] filter support: JSON descriptors
 
 OPTIMIZATIONS
    HBASE-410   [testing] Speed up the test suite
pom.xml
@@ -15,7 +15,7 @@
 <properties>
   <jsr311.version>1.1.1</jsr311.version>
-  <protobuf.version>2.1.0</protobuf.version>
+  <protobuf.version>2.3.0</protobuf.version>
   <jersey.version>1.1.4.1</jersey.version>
 </properties>
org/apache/hadoop/hbase/stargate/ResultGenerator.java
@@ -24,16 +24,26 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.stargate.model.ScannerModel;
 
+import org.json.JSONObject;
+
 public abstract class ResultGenerator implements Iterator<KeyValue> {
-  public static ResultGenerator fromRowSpec(String table, RowSpec rowspec)
-      throws IOException {
+
+  public static ResultGenerator fromRowSpec(final String table,
+      final RowSpec rowspec, final Filter filter) throws IOException {
     if (rowspec.isSingleRow()) {
-      return new RowResultGenerator(table, rowspec);
+      return new RowResultGenerator(table, rowspec, filter);
     } else {
-      return new ScannerResultGenerator(table, rowspec);
+      return new ScannerResultGenerator(table, rowspec, filter);
     }
   }
 
+  public static Filter buildFilter(String filter) throws Exception {
+    return ScannerModel.buildFilter(new JSONObject(filter));
+  }
+
+  public abstract void close();
+
 }
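The new buildFilter hook above is the seam between the REST layer and HBase's filter classes: a JSON descriptor string comes in, ScannerModel.buildFilter (later in this commit) turns it into a Filter, and fromRowSpec threads it down to the concrete generators. A minimal illustrative sketch, not part of the commit — "mytable", the in-scope rowspec, and the prefix value are assumptions:

    // Hypothetical caller; assumes a RowSpec named rowspec is in scope.
    // "dXNlcl8=" is base64("user_"): binary filter arguments travel
    // base64-encoded inside the JSON descriptors.
    String json = "{\"type\": \"PrefixFilter\", \"value\": \"dXNlcl8=\"}";
    Filter filter = ResultGenerator.buildFilter(json);
    ResultGenerator gen =
      ResultGenerator.fromRowSpec("mytable", rowspec, filter);
    while (gen.hasNext()) {
      KeyValue kv = gen.next();
      // ... emit the cell ...
    }
    gen.close();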
org/apache/hadoop/hbase/stargate/RowResource.java
@@ -90,7 +90,7 @@ public class RowResource implements Constants {
     servlet.getMetrics().incrementRequests(1);
     try {
       ResultGenerator generator =
-        ResultGenerator.fromRowSpec(actualTableName, rowspec);
+        ResultGenerator.fromRowSpec(actualTableName, rowspec, null);
       if (!generator.hasNext()) {
         throw new WebApplicationException(Response.Status.NOT_FOUND);
       }
@@ -133,7 +133,7 @@ public class RowResource implements Constants {
     }
     try {
       ResultGenerator generator =
-        ResultGenerator.fromRowSpec(actualTableName, rowspec);
+        ResultGenerator.fromRowSpec(actualTableName, rowspec, null);
       if (!generator.hasNext()) {
         throw new WebApplicationException(Response.Status.NOT_FOUND);
       }
org/apache/hadoop/hbase/stargate/RowResultGenerator.java
@@ -30,12 +30,13 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.filter.Filter;
 
 public class RowResultGenerator extends ResultGenerator {
   private Iterator<KeyValue> valuesI;
 
-  public RowResultGenerator(String tableName, RowSpec rowspec)
-      throws IllegalArgumentException, IOException {
+  public RowResultGenerator(final String tableName, final RowSpec rowspec,
+      final Filter filter) throws IllegalArgumentException, IOException {
     HTablePool pool = RESTServlet.getInstance().getTablePool();
     HTableInterface table = pool.getTable(tableName);
     try {
@@ -58,6 +59,9 @@ public class RowResultGenerator extends ResultGenerator {
       }
       get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
       get.setMaxVersions(rowspec.getMaxVersions());
+      if (filter != null) {
+        get.setFilter(filter);
+      }
       Result result = table.get(get);
       if (result != null && !result.isEmpty()) {
         valuesI = result.list().iterator();
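Single-row requests get the same treatment as scans: a non-null filter is attached to the Get, so filtering happens region-side rather than in stargate. An illustrative fragment mirroring the constructor body — the descriptor and the getRow() accessor on RowSpec are assumptions of this sketch, not part of the diff:

    // Sketch only: attach a descriptor-built filter to a single-row Get.
    Get get = new Get(rowspec.getRow());   // assumes RowSpec exposes getRow()
    Filter filter = ResultGenerator.buildFilter(
      "{\"type\": \"FirstKeyOnlyFilter\"}"); // a no-argument filter type
    get.setFilter(filter); // applied server-side, like Scan.setFilter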
org/apache/hadoop/hbase/stargate/ScannerResource.java
@@ -20,8 +20,6 @@
 
 package org.apache.hadoop.hbase.stargate;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 import java.io.IOException;
 import java.net.URI;
 import java.util.HashMap;
@@ -41,6 +39,8 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hbase.filter.Filter;
+
 import org.apache.hadoop.hbase.stargate.auth.User;
 import org.apache.hadoop.hbase.stargate.model.ScannerModel;
@@ -83,7 +83,9 @@ public class ScannerResource implements Constants {
     RowSpec spec = new RowSpec(model.getStartRow(), endRow,
       model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
     try {
-      ScannerResultGenerator gen = new ScannerResultGenerator(actualTableName, spec);
+      Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
+      ScannerResultGenerator gen =
+        new ScannerResultGenerator(actualTableName, spec, filter);
       String id = gen.getID();
       ScannerInstanceResource instance =
         new ScannerInstanceResource(actualTableName, id, gen, model.getBatch());
@@ -96,11 +98,11 @@ public class ScannerResource implements Constants {
       UriBuilder builder = uriInfo.getAbsolutePathBuilder();
       URI uri = builder.path(id).build();
       return Response.created(uri).build();
-    } catch (InvalidProtocolBufferException e) {
-      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
     } catch (IOException e) {
       throw new WebApplicationException(e,
         Response.Status.SERVICE_UNAVAILABLE);
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
     }
   }
 
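Note the exception mapping: a malformed filter descriptor now surfaces from buildFilterFromModel as a generic Exception and is reported as 400 BAD_REQUEST, while I/O trouble keeps mapping to 503. On the wire, the descriptor rides as an opaque string in the Scanner representation's filter element (see the ScannerModel schema below); a scanner-creation request might look roughly like this — path, headers, and values are illustrative, not prescribed by this diff:

    POST /mytable/scanner HTTP/1.1
    Content-Type: text/xml

    <Scanner batch="10">
      <filter>{"type": "PrefixFilter", "value": "dXNlcl8="}</filter>
    </Scanner>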
org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java
@@ -33,19 +33,31 @@ import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.stargate.model.ScannerModel;
 import org.apache.hadoop.util.StringUtils;
 
 public class ScannerResultGenerator extends ResultGenerator {
 
   private static final Log LOG =
     LogFactory.getLog(ScannerResultGenerator.class);
 
+  public static Filter buildFilterFromModel(ScannerModel model)
+      throws Exception {
+    String filter = model.getFilter();
+    if (filter == null || filter.length() == 0) {
+      return null;
+    }
+    return buildFilter(filter);
+  }
+
   private String id;
   private Iterator<KeyValue> rowI;
   private ResultScanner scanner;
   private Result cached;
 
-  public ScannerResultGenerator(String tableName, RowSpec rowspec)
-      throws IllegalArgumentException, IOException {
+  public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
+      final Filter filter) throws IllegalArgumentException, IOException {
     HTablePool pool = RESTServlet.getInstance().getTablePool();
     HTableInterface table = pool.getTable(tableName);
     try {
@@ -59,7 +71,7 @@ public class ScannerResultGenerator extends ResultGenerator {
       byte[][] columns = rowspec.getColumns();
       for (byte[] column: columns) {
         byte[][] split = KeyValue.parseColumn(column);
-        if (split.length == 2 && split[1].length != 0) {
+        if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
           scan.addColumn(split[0], split[1]);
         } else {
           scan.addFamily(split[0]);
@@ -73,6 +85,11 @@ public class ScannerResultGenerator extends ResultGenerator {
       }
       scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
       scan.setMaxVersions(rowspec.getMaxVersions());
+      if (filter != null) {
+        scan.setFilter(filter);
+      }
+      // always disable block caching on the cluster
+      scan.setCacheBlocks(false);
       scanner = table.getScanner(scan);
       cached = null;
       id = Long.toString(System.currentTimeMillis()) +
@@ -145,4 +162,5 @@ public class ScannerResultGenerator extends ResultGenerator {
   public void remove() {
     throw new UnsupportedOperationException("remove not supported");
   }
+
 }
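Two details worth noting here: buildFilterFromModel returns null for an absent or empty filter string and the constructor only calls scan.setFilter on non-null filters, so filterless scans behave exactly as before; and the loosened column check now accepts bare families as well as family:qualifier pairs. Illustrative values for the latter (the column names are assumed, not from the diff):

    // Family plus qualifier: split.length > 1 with a non-empty split[1],
    // so the constructor calls scan.addColumn(split[0], split[1]).
    byte[][] split = KeyValue.parseColumn(Bytes.toBytes("info:email"));
    // Bare family: no non-empty qualifier survives the check,
    // so the constructor falls through to scan.addFamily(split[0]).
    split = KeyValue.parseColumn(Bytes.toBytes("info"));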
org/apache/hadoop/hbase/stargate/model/ScannerModel.java
@@ -30,10 +30,36 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.SkipFilter;
+import org.apache.hadoop.hbase.filter.SubstringComparator;
+import org.apache.hadoop.hbase.filter.ValueFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.json.JSONStringer;
+
 import com.google.protobuf.ByteString;
 
 /**
@@ -44,16 +70,41 @@ import com.google.protobuf.ByteString;
  * <sequence>
  *   <element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"/>
  * </sequence>
+ * <element name="filter" type="string" minOccurs="0" maxOccurs="1"></element>
  * <attribute name="startRow" type="base64Binary"></attribute>
  * <attribute name="endRow" type="base64Binary"></attribute>
  * <attribute name="batch" type="int"></attribute>
  * <attribute name="startTime" type="int"></attribute>
  * <attribute name="endTime" type="int"></attribute>
+ * <attribute name="maxVersions" type="int"></attribute>
  * </complexType>
  * </pre>
  */
 @XmlRootElement(name="Scanner")
 public class ScannerModel implements ProtobufMessageHandler, Serializable {
 
+  static enum FilterType {
+    ColumnCountGetFilter,
+    FilterList,
+    FirstKeyOnlyFilter,
+    InclusiveStopFilter,
+    PageFilter,
+    PrefixFilter,
+    QualifierFilter,
+    RowFilter,
+    SingleColumnValueFilter,
+    SkipFilter,
+    ValueFilter,
+    WhileMatchFilter
+  }
+
+  static enum ComparatorType {
+    BinaryComparator,
+    BinaryPrefixComparator,
+    RegexStringComparator,
+    SubstringComparator
+  }
+
   private static final long serialVersionUID = 1L;
 
   private byte[] startRow = HConstants.EMPTY_START_ROW;
@@ -62,6 +113,244 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
   private int batch = 1;
   private long startTime = 0;
   private long endTime = Long.MAX_VALUE;
+  private String filter;
+  private int maxVersions = 1;
+
+  /**
+   * @param o the JSON representation of the comparator
+   * @return the comparator
+   * @throws Exception
+   */
+  public static WritableByteArrayComparable
+      buildWritableByteArrayComparable(final JSONObject o) throws Exception {
+    String type = o.getString("type");
+    String value = o.getString("value");
+    WritableByteArrayComparable comparator;
+    switch (ComparatorType.valueOf(type)) {
+    case BinaryComparator: {
+      comparator = new BinaryComparator(Base64.decode(value));
+    } break;
+    case BinaryPrefixComparator: {
+      comparator = new BinaryPrefixComparator(Base64.decode(value));
+    } break;
+    case RegexStringComparator: {
+      comparator = new RegexStringComparator(value);
+    } break;
+    case SubstringComparator: {
+      comparator = new SubstringComparator(value);
+    } break;
+    default: {
+      throw new RuntimeException("unhandled comparator type: " + type);
+    }
+    }
+    return comparator;
+  }
+
+  /**
+   * @param o the JSON representation of the filter
+   * @return the filter
+   * @throws Exception
+   */
+  public static Filter buildFilter(final JSONObject o) throws Exception {
+    String type = o.getString("type");
+    Filter filter;
+    switch (FilterType.valueOf(type)) {
+    case ColumnCountGetFilter: {
+      filter = new ColumnCountGetFilter(o.getInt("limit"));
+    } break;
+    case FilterList: {
+      JSONArray arr = o.getJSONArray("filters");
+      List<Filter> filters = new ArrayList<Filter>(arr.length());
+      for (int i = 0; i < arr.length(); i++) {
+        filters.add(buildFilter(arr.getJSONObject(i)));
+      }
+      filter = new FilterList(
+        FilterList.Operator.valueOf(o.getString("op")),
+        filters);
+    } break;
+    case FirstKeyOnlyFilter: {
+      filter = new FirstKeyOnlyFilter();
+    } break;
+    case InclusiveStopFilter: {
+      filter = new InclusiveStopFilter(Base64.decode(o.getString("value")));
+    } break;
+    case PageFilter: {
+      filter = new PageFilter(o.getLong("value"));
+    } break;
+    case PrefixFilter: {
+      filter = new PrefixFilter(Base64.decode(o.getString("value")));
+    } break;
+    case QualifierFilter: {
+      filter = new QualifierFilter(CompareOp.valueOf(o.getString("op")),
+        buildWritableByteArrayComparable(o.getJSONObject("comparator")));
+    } break;
+    case RowFilter: {
+      filter = new RowFilter(CompareOp.valueOf(o.getString("op")),
+        buildWritableByteArrayComparable(o.getJSONObject("comparator")));
+    } break;
+    case SingleColumnValueFilter: {
+      filter = new SingleColumnValueFilter(
+        Base64.decode(o.getString("family")),
+        o.has("qualifier") ? Base64.decode(o.getString("qualifier")) : null,
+        CompareOp.valueOf(o.getString("op")),
+        buildWritableByteArrayComparable(o.getJSONObject("comparator")));
+      if (o.has("ifMissing")) {
+        ((SingleColumnValueFilter)filter)
+          .setFilterIfMissing(o.getBoolean("ifMissing"));
+      }
+      if (o.has("latestVersion")) {
+        ((SingleColumnValueFilter)filter)
+          .setLatestVersionOnly(o.getBoolean("latestVersion"));
+      }
+    } break;
+    case SkipFilter: {
+      filter = new SkipFilter(buildFilter(o.getJSONObject("filter")));
+    } break;
+    case ValueFilter: {
+      filter = new ValueFilter(CompareOp.valueOf(o.getString("op")),
+        buildWritableByteArrayComparable(o.getJSONObject("comparator")));
+    } break;
+    case WhileMatchFilter: {
+      filter = new WhileMatchFilter(buildFilter(o.getJSONObject("filter")));
+    } break;
+    default: {
+      throw new RuntimeException("unhandled filter type: " + type);
+    }
+    }
+    return filter;
+  }
+
+  /**
+   * @param s the JSONStringer
+   * @param comparator the comparator
+   * @return the JSONStringer
+   * @throws Exception
+   */
+  public static JSONStringer stringifyComparator(final JSONStringer s,
+      final WritableByteArrayComparable comparator) throws Exception {
+    String typeName = comparator.getClass().getSimpleName();
+    ComparatorType type = ComparatorType.valueOf(typeName);
+    s.object();
+    s.key("type").value(typeName);
+    switch (type) {
+    case BinaryComparator:
+    case BinaryPrefixComparator:
+      s.key("value").value(Base64.encodeBytes(comparator.getValue()));
+      break;
+    case RegexStringComparator:
+    case SubstringComparator:
+      s.key("value").value(Bytes.toString(comparator.getValue()));
+      break;
+    default:
+      throw new RuntimeException("unhandled comparator type: " + type);
+    }
+    s.endObject();
+    return s;
+  }
+
+  /**
+   * @param s the JSONStringer
+   * @param filter the filter
+   * @return the JSONStringer
+   * @throws Exception
+   */
+  public static JSONStringer stringifyFilter(final JSONStringer s,
+      final Filter filter) throws Exception {
+    String typeName = filter.getClass().getSimpleName();
+    FilterType type;
+    try {
+      type = FilterType.valueOf(typeName);
+    } catch (IllegalArgumentException e) {
+      throw new RuntimeException("filter type " + typeName + " not supported");
+    }
+    s.object();
+    s.key("type").value(typeName);
+    switch (type) {
+    case ColumnCountGetFilter:
+      s.key("limit").value(((ColumnCountGetFilter)filter).getLimit());
+      break;
+    case FilterList:
+      s.key("op").value(((FilterList)filter).getOperator().toString());
+      s.key("filters").array();
+      for (Filter child: ((FilterList)filter).getFilters()) {
+        stringifyFilter(s, child);
+      }
+      s.endArray();
+      break;
+    case FirstKeyOnlyFilter:
+      break;
+    case InclusiveStopFilter:
+      s.key("value").value(
+        Base64.encodeBytes(((InclusiveStopFilter)filter).getStopRowKey()));
+      break;
+    case PageFilter:
+      s.key("value").value(((PageFilter)filter).getPageSize());
+      break;
+    case PrefixFilter:
+      s.key("value")
+        .value(Base64.encodeBytes(((PrefixFilter)filter).getPrefix()));
+      break;
+    case QualifierFilter:
+    case RowFilter:
+    case ValueFilter:
+      s.key("op").value(((CompareFilter)filter).getOperator().toString());
+      s.key("comparator");
+      stringifyComparator(s, ((CompareFilter)filter).getComparator());
+      break;
+    case SingleColumnValueFilter: {
+      SingleColumnValueFilter scvf = (SingleColumnValueFilter) filter;
+      s.key("family").value(scvf.getFamily());
+      byte[] qualifier = scvf.getQualifier();
+      if (qualifier != null) {
+        s.key("qualifier").value(qualifier);
+      }
+      s.key("op").value(scvf.getOperator().toString());
+      s.key("comparator");
+      stringifyComparator(s, scvf.getComparator());
+      if (scvf.getFilterIfMissing()) {
+        s.key("ifMissing").value(true);
+      }
+      if (scvf.getLatestVersionOnly()) {
+        s.key("latestVersion").value(true);
+      }
+    } break;
+    case SkipFilter:
+      s.key("filter");
+      stringifyFilter(s, ((SkipFilter)filter).getFilter());
+      break;
+    case WhileMatchFilter:
+      s.key("filter");
+      stringifyFilter(s, ((WhileMatchFilter)filter).getFilter());
+      break;
+    }
+    s.endObject();
+    return s;
+  }
+
+  /**
+   * @param scan the scan specification
+   * @return a ScannerModel representation of the scan
+   * @throws Exception
+   */
+  public static ScannerModel fromScan(Scan scan) throws Exception {
+    ScannerModel model = new ScannerModel();
+    model.setStartRow(scan.getStartRow());
+    model.setEndRow(scan.getStopRow());
+    byte[][] families = scan.getFamilies();
+    if (families != null) {
+      for (byte[] column: families) {
+        model.addColumn(column);
+      }
+    }
+    model.setStartTime(scan.getTimeRange().getMin());
+    model.setEndTime(scan.getTimeRange().getMax());
+    model.setBatch(scan.getCaching());
+    model.setMaxVersions(scan.getMaxVersions());
+    Filter filter = scan.getFilter();
+    if (filter != null) {
+      model.setFilter(stringifyFilter(new JSONStringer(), filter).toString());
+    }
+    return model;
+  }
+
   /**
    * Default constructor
@@ -75,16 +364,20 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
   * @param columns the columns to scan
   * @param batch the number of values to return in batch
   * @param endTime the upper bound on timestamps of values of interest
   *   (values with timestamps later than this are excluded)
+  * @param maxVersions the maximum number of versions to return
+  * @param filter a filter specification
   */
  public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
-      int batch, long endTime) {
+      int batch, long endTime, int maxVersions, String filter) {
    super();
    this.startRow = startRow;
    this.endRow = endRow;
    this.columns = columns;
    this.batch = batch;
    this.endTime = endTime;
+    this.maxVersions = maxVersions;
+    this.filter = filter;
  }
 
  /**
@@ -97,9 +390,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
   *   (values with timestamps earlier than this are excluded)
   * @param endTime the upper bound on timestamps of values of interest
   *   (values with timestamps later than this are excluded)
+  * @param filter a filter specification
   */
  public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
-      int batch, long startTime, long endTime) {
+      int batch, long startTime, long endTime, String filter) {
    super();
    this.startRow = startRow;
    this.endRow = endRow;
@@ -107,6 +401,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
    this.batch = batch;
    this.startTime = startTime;
    this.endTime = endTime;
+    this.filter = filter;
  }
 
  /**
@@ -179,6 +474,22 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
    return endTime;
  }
 
+  /**
+   * @return maximum number of versions to return
+   */
+  @XmlAttribute
+  public int getMaxVersions() {
+    return maxVersions;
+  }
+
+  /**
+   * @return the filter specification
+   */
+  @XmlElement
+  public String getFilter() {
+    return filter;
+  }
+
  /**
   * @param startRow start row
   */
@@ -207,6 +518,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
    this.batch = batch;
  }
 
+  /**
+   * @param maxVersions maximum number of versions to return
+   */
+  public void setMaxVersions(int maxVersions) {
+    this.maxVersions = maxVersions;
+  }
+
  /**
   * @param startTime the lower bound on timestamps of values of interest
   */
@@ -221,6 +539,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
    this.endTime = endTime;
  }
 
+  /**
+   * @param filter the filter specification
+   */
+  public void setFilter(String filter) {
+    this.filter = filter;
+  }
+
  @Override
  public byte[] createProtobufOutput() {
    Scanner.Builder builder = Scanner.newBuilder();
@@ -241,6 +566,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
      builder.setEndTime(endTime);
    }
    builder.setBatch(getBatch());
+    builder.setMaxVersions(maxVersions);
+    if (filter != null) {
+      builder.setFilter(filter);
+    }
    return builder.build().toByteArray();
  }
 
@@ -267,6 +596,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
    if (builder.hasEndTime()) {
      endTime = builder.getEndTime();
    }
+    if (builder.hasMaxVersions()) {
+      maxVersions = builder.getMaxVersions();
+    }
+    if (builder.hasFilter()) {
+      filter = builder.getFilter();
+    }
    return this;
  }
 
 }
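Taken together, buildFilter and stringifyFilter define a small JSON vocabulary over the filter package: every descriptor is an object whose type names a FilterType constant, plus type-specific keys (limit, value, op, family, qualifier, comparator, filters, filter, ifMissing, latestVersion), with binary values base64-encoded and comparators as nested {type, value} objects. Because stringifyFilter emits the same shape buildFilter consumes, descriptors round-trip through fromScan. An illustrative composite descriptor — the column and value contents are assumed; "aW5mbw==", "YWdl", and "MjE=" are base64 for "info", "age", and "21":

    {
      "type": "FilterList",
      "op": "MUST_PASS_ALL",
      "filters": [
        { "type": "PageFilter", "value": 25 },
        { "type": "SingleColumnValueFilter",
          "family": "aW5mbw==",
          "qualifier": "YWdl",
          "op": "GREATER_OR_EQUAL",
          "comparator": { "type": "BinaryComparator", "value": "MjE=" },
          "ifMissing": true,
          "latestVersion": true }
      ]
    }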
org/apache/hadoop/hbase/stargate/protobuf/generated/CellMessage.java
@@ -1,4 +1,5 @@
 // Generated by the protocol buffer compiler. DO NOT EDIT!
 // source: CellMessage.proto
 
 package org.apache.hadoop.hbase.stargate.protobuf.generated;
+
@@ -10,9 +11,12 @@ public final class CellMessage {
   public static final class Cell extends
       com.google.protobuf.GeneratedMessage {
     // Use Cell.newBuilder() to construct.
-    private Cell() {}
+    private Cell() {
+      initFields();
+    }
+    private Cell(boolean noInit) {}
     
-    private static final Cell defaultInstance = new Cell();
+    private static final Cell defaultInstance;
     public static Cell getDefaultInstance() {
       return defaultInstance;
     }
@@ -26,7 +30,6 @@ public final class CellMessage {
       return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_descriptor;
     }
     
-    @Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_fieldAccessorTable;
@@ -60,14 +63,15 @@ public final class CellMessage {
     public boolean hasData() { return hasData; }
     public com.google.protobuf.ByteString getData() { return data_; }
     
-    @Override
+    private void initFields() {
+    }
     public final boolean isInitialized() {
       return true;
     }
     
-    @Override
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
+      getSerializedSize();
       if (hasRow()) {
         output.writeBytes(1, getRow());
       }
@@ -84,7 +88,6 @@ public final class CellMessage {
     }
     
     private int memoizedSerializedSize = -1;
-    @Override
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
@@ -118,7 +121,7 @@ public final class CellMessage {
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
         com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return newBuilder().mergeFrom(data, extensionRegistry)
                .buildParsed();
@@ -129,7 +132,7 @@ public final class CellMessage {
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
         byte[] data,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return newBuilder().mergeFrom(data, extensionRegistry)
                .buildParsed();
@@ -140,21 +143,30 @@ public final class CellMessage {
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
         java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return newBuilder().mergeFrom(input, extensionRegistry)
                .buildParsed();
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeDelimitedFrom(input).buildParsed();
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
         java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
-               .buildParsed();
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
         com.google.protobuf.CodedInputStream input)
@@ -163,43 +175,49 @@ public final class CellMessage {
     }
     public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
         com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return newBuilder().mergeFrom(input, extensionRegistry)
                .buildParsed();
     }
     
-    public static Builder newBuilder() { return new Builder(); }
-    public Builder newBuilderForType() { return new Builder(); }
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell prototype) {
-      return new Builder().mergeFrom(prototype);
+      return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
     
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      private org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell result;
+      
       // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.newBuilder()
       private Builder() {}
       
-      org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell();
+      private static Builder create() {
+        Builder builder = new Builder();
+        builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell();
+        return builder;
+      }
      
-      @Override
      protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell internalGetResult() {
        return result;
      }
      
-      @Override
      public Builder clear() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "Cannot call clear() after build().");
+        }
        result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell();
        return this;
      }
      
-      @Override
      public Builder clone() {
-        return new Builder().mergeFrom(result);
+        return create().mergeFrom(result);
      }
      
-      @Override
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.getDescriptor();
@@ -209,10 +227,12 @@ public final class CellMessage {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.getDefaultInstance();
      }
      
+      public boolean isInitialized() {
+        return result.isInitialized();
+      }
      public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell build() {
        if (result != null && !isInitialized()) {
-          throw new com.google.protobuf.UninitializedMessageException(
-            result);
+          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
@@ -220,7 +240,7 @@ public final class CellMessage {
      private org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
-          throw new com.google.protobuf.UninitializedMessageException(
+          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
@@ -229,13 +249,13 @@ public final class CellMessage {
      public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
-            "build() has already been called on this Builder."); }
+            "build() has already been called on this Builder.");
+        }
        org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell returnMe = result;
        result = null;
        return returnMe;
      }
      
-      @Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell) {
          return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell)other);
@@ -263,18 +283,9 @@ public final class CellMessage {
        return this;
      }
      
-      @Override
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return mergeFrom(input,
-          com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
-      }
-      
-      @Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
@@ -331,7 +342,7 @@ public final class CellMessage {
      }
      public Builder clearRow() {
        result.hasRow = false;
-        result.row_ = com.google.protobuf.ByteString.EMPTY;
+        result.row_ = getDefaultInstance().getRow();
        return this;
      }
      
@@ -352,7 +363,7 @@ public final class CellMessage {
      }
      public Builder clearColumn() {
        result.hasColumn = false;
-        result.column_ = com.google.protobuf.ByteString.EMPTY;
+        result.column_ = getDefaultInstance().getColumn();
        return this;
      }
      
@@ -391,14 +402,20 @@ public final class CellMessage {
      }
      public Builder clearData() {
        result.hasData = false;
-        result.data_ = com.google.protobuf.ByteString.EMPTY;
+        result.data_ = getDefaultInstance().getData();
        return this;
      }
      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Cell)
    }
    
    static {
-      org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.getDescriptor();
+      defaultInstance = new Cell(true);
+      org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internalForceInit();
+      defaultInstance.initFields();
    }
    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Cell)
  }
  
  private static com.google.protobuf.Descriptors.Descriptor
@@ -414,11 +431,12 @@ public final class CellMessage {
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
-    java.lang.String descriptorData =
+    java.lang.String[] descriptorData = {
      "\n\021CellMessage.proto\0223org.apache.hadoop.h" +
      "base.stargate.protobuf.generated\"D\n\004Cell" +
      "\022\013\n\003row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimesta" +
-      "mp\030\003 \001(\003\022\014\n\004data\030\004 \001(\014";
+      "mp\030\003 \001(\003\022\014\n\004data\030\004 \001(\014"
+    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
@@ -440,4 +458,8 @@ public final class CellMessage {
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
+  
+  public static void internalForceInit() {}
+  
+  // @@protoc_insertion_point(outer_class_scope)
 }
org/apache/hadoop/hbase/stargate/protobuf/generated/CellSetMessage.java
@@ -1,4 +1,5 @@
 // Generated by the protocol buffer compiler. DO NOT EDIT!
 // source: CellSetMessage.proto
 
 package org.apache.hadoop.hbase.stargate.protobuf.generated;
+
@@ -10,9 +11,12 @@ public final class CellSetMessage {
   public static final class CellSet extends
       com.google.protobuf.GeneratedMessage {
     // Use CellSet.newBuilder() to construct.
-    private CellSet() {}
+    private CellSet() {
+      initFields();
+    }
+    private CellSet(boolean noInit) {}
     
-    private static final CellSet defaultInstance = new CellSet();
+    private static final CellSet defaultInstance;
     public static CellSet getDefaultInstance() {
       return defaultInstance;
     }
@@ -26,7 +30,6 @@ public final class CellSetMessage {
       return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor;
     }
     
-    @Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_fieldAccessorTable;
@@ -35,9 +38,12 @@ public final class CellSetMessage {
     public static final class Row extends
         com.google.protobuf.GeneratedMessage {
       // Use Row.newBuilder() to construct.
-      private Row() {}
+      private Row() {
+        initFields();
+      }
+      private Row(boolean noInit) {}
      
-      private static final Row defaultInstance = new Row();
+      private static final Row defaultInstance;
      public static Row getDefaultInstance() {
        return defaultInstance;
      }
@@ -51,7 +57,6 @@ public final class CellSetMessage {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_descriptor;
      }
      
-      @Override
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_fieldAccessorTable;
@@ -76,15 +81,16 @@ public final class CellSetMessage {
        return values_.get(index);
      }
      
-      @Override
+      private void initFields() {
+      }
      public final boolean isInitialized() {
        if (!hasKey) return false;
        return true;
      }
      
-      @Override
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
+        getSerializedSize();
        if (hasKey()) {
          output.writeBytes(1, getKey());
        }
@@ -95,7 +101,6 @@ public final class CellSetMessage {
      }
      
      private int memoizedSerializedSize = -1;
-      @Override
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
@@ -121,7 +126,7 @@ public final class CellSetMessage {
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
          com.google.protobuf.ByteString data,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
@@ -132,7 +137,7 @@ public final class CellSetMessage {
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
          byte[] data,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
@@ -143,21 +148,30 @@ public final class CellSetMessage {
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
-        return newBuilder().mergeDelimitedFrom(input).buildParsed();
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(
          java.io.InputStream input,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
-        return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
-                 .buildParsed();
+        Builder builder = newBuilder();
+        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+          return builder.buildParsed();
+        } else {
+          return null;
+        }
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
          com.google.protobuf.CodedInputStream input)
@@ -166,43 +180,49 @@ public final class CellSetMessage {
      }
      public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      
-      public static Builder newBuilder() { return new Builder(); }
-      public Builder newBuilderForType() { return new Builder(); }
+      public static Builder newBuilder() { return Builder.create(); }
+      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row prototype) {
-        return new Builder().mergeFrom(prototype);
+        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }
      
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
+        private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row result;
+        
        // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.newBuilder()
        private Builder() {}
        
-        org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row();
+        private static Builder create() {
+          Builder builder = new Builder();
+          builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row();
+          return builder;
+        }
        
-        @Override
        protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row internalGetResult() {
          return result;
        }
        
-        @Override
        public Builder clear() {
+          if (result == null) {
+            throw new IllegalStateException(
+              "Cannot call clear() after build().");
+          }
          result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row();
          return this;
        }
        
-        @Override
        public Builder clone() {
-          return new Builder().mergeFrom(result);
+          return create().mergeFrom(result);
        }
        
-        @Override
        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.getDescriptor();
@@ -212,10 +232,12 @@ public final class CellSetMessage {
          return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.getDefaultInstance();
        }
        
+        public boolean isInitialized() {
+          return result.isInitialized();
+        }
        public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row build() {
          if (result != null && !isInitialized()) {
-            throw new com.google.protobuf.UninitializedMessageException(
-              result);
+            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }
@@ -223,7 +245,7 @@ public final class CellSetMessage {
        private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
-            throw new com.google.protobuf.UninitializedMessageException(
+            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
@@ -232,7 +254,8 @@ public final class CellSetMessage {
        public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
-              "build() has already been called on this Builder."); }
+              "build() has already been called on this Builder.");
+          }
          if (result.values_ != java.util.Collections.EMPTY_LIST) {
            result.values_ =
              java.util.Collections.unmodifiableList(result.values_);
@@ -242,7 +265,6 @@ public final class CellSetMessage {
          return returnMe;
        }
        
-        @Override
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row) {
            return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row)other);
@@ -267,18 +289,9 @@ public final class CellSetMessage {
          return this;
        }
        
-        @Override
-        public Builder mergeFrom(
-            com.google.protobuf.CodedInputStream input)
-            throws java.io.IOException {
-          return mergeFrom(input,
-            com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
-        }
-        
-        @Override
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
-            com.google.protobuf.ExtensionRegistry extensionRegistry)
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
@@ -329,7 +342,7 @@ public final class CellSetMessage {
        }
        public Builder clearKey() {
          result.hasKey = false;
-          result.key_ = com.google.protobuf.ByteString.EMPTY;
+          result.key_ = getDefaultInstance().getKey();
          return this;
        }
        
@@ -383,11 +396,17 @@ public final class CellSetMessage {
          result.values_ = java.util.Collections.emptyList();
          return this;
        }
        
+        // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row)
      }
      
      static {
-        org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.getDescriptor();
+        defaultInstance = new Row(true);
+        org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internalForceInit();
+        defaultInstance.initFields();
      }
      
+      // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row)
    }
    
    // repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row rows = 1;
@@ -402,7 +421,8 @@ public final class CellSetMessage {
      return rows_.get(index);
    }
    
-    @Override
+    private void initFields() {
+    }
    public final boolean isInitialized() {
      for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
        if (!element.isInitialized()) return false;
@@ -410,9 +430,9 @@ public final class CellSetMessage {
      return true;
    }
    
-    @Override
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
+      getSerializedSize();
      for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
        output.writeMessage(1, element);
      }
@@ -420,7 +440,6 @@ public final class CellSetMessage {
    }
    
    private int memoizedSerializedSize = -1;
-    @Override
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
@@ -442,7 +461,7 @@ public final class CellSetMessage {
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
@@ -453,7 +472,7 @@ public final class CellSetMessage {
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
        byte[] data,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
@@ -464,21 +483,30 @@ public final class CellSetMessage {
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return newBuilder().mergeDelimitedFrom(input).buildParsed();
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(
        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
-               .buildParsed();
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
        com.google.protobuf.CodedInputStream input)
@@ -487,43 +515,49 @@ public final class CellSetMessage {
    }
    public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistry extensionRegistry)
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    
-    public static Builder newBuilder() { return new Builder(); }
-    public Builder newBuilderForType() { return new Builder(); }
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet prototype) {
-      return new Builder().mergeFrom(prototype);
+      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet result;
+      
      // Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.newBuilder()
      private Builder() {}
      
-      org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet();
+      private static Builder create() {
+        Builder builder = new Builder();
+        builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet();
+        return builder;
+      }
      
-      @Override
      protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet internalGetResult() {
        return result;
      }
      
-      @Override
      public Builder clear() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "Cannot call clear() after build().");
+        }
        result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet();
        return this;
      }
      
-      @Override
      public Builder clone() {
-        return new Builder().mergeFrom(result);
+        return create().mergeFrom(result);
      }
      
-      @Override
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.getDescriptor();
@@ -533,10 +567,12 @@ public final class CellSetMessage {
        return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.getDefaultInstance();
      }
      
+      public boolean isInitialized() {
+        return result.isInitialized();
+      }
      public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet build() {
        if (result != null && !isInitialized()) {
-          throw new com.google.protobuf.UninitializedMessageException(
-            result);
+          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }
@@ -544,7 +580,7 @@ public final class CellSetMessage {
      private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
-          throw new com.google.protobuf.UninitializedMessageException(
+          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
@@ -553,7 +589,8 @@ public final class CellSetMessage {
      public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
-            "build() has already been called on this Builder."); }
+            "build() has already been called on this Builder.");
+        }
        if (result.rows_ != java.util.Collections.EMPTY_LIST) {
          result.rows_ =
            java.util.Collections.unmodifiableList(result.rows_);
@@ -563,7 +600,6 @@ public final class CellSetMessage {
        return returnMe;
      }
      
-      @Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet) {
          return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet)other);
@@ -585,18 +621,9 @@ public final class CellSetMessage {
        return this;
      }
      
-      @Override
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input)
-          throws java.io.IOException {
-        return mergeFrom(input,
-          com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
-      }
-      
-      @Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistry extensionRegistry)
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
@@ -676,11 +703,17 @@ public final class CellSetMessage {
        result.rows_ = java.util.Collections.emptyList();
        return this;
      }
      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet)
    }
    
    static {
-      org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.getDescriptor();
+      defaultInstance = new CellSet(true);
+      org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internalForceInit();
+      defaultInstance.initFields();
    }
    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet)
  }
  
  private static com.google.protobuf.Descriptors.Descriptor
@@ -701,7 +734,7 @@ public final class CellSetMessage {
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
-    java.lang.String descriptorData =
+    java.lang.String[] descriptorData = {
      "\n\024CellSetMessage.proto\0223org.apache.hadoo" +
      "p.hbase.stargate.protobuf.generated\032\021Cel" +
      "lMessage.proto\"\270\001\n\007CellSet\022N\n\004rows\030\001 \003(\013" +
@@ -709,7 +742,8 @@ public final class CellSetMessage {
      "obuf.generated.CellSet.Row\032]\n\003Row\022\013\n\003key" +
      "\030\001 \002(\014\022I\n\006values\030\002 \003(\01329.org.apache.hado" +
      "op.hbase.stargate.protobuf.generated.Cel" +
-      "l";
+      "l"
+    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
|
@ -740,4 +774,8 @@ public final class CellSetMessage {
|
|||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.getDescriptor(),
|
||||
}, assigner);
|
||||
}
|
||||
|
||||
public static void internalForceInit() {}
|
||||
|
||||
// @@protoc_insertion_point(outer_class_scope)
|
||||
}
|
||||
|
|
|
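The changes above are the protobuf 2.1.0 to 2.3.0 regeneration pattern, and it repeats verbatim in every message class in this commit: parse methods accept the wider ExtensionRegistryLite, builders are created through a static create() that assigns an explicit result instance, uninitialized messages go through newUninitializedMessageException, and parseDelimitedFrom now returns null on a clean end-of-stream instead of building from an empty merge. A minimal caller-side sketch of that last contract, assuming CellSet carries the same parseDelimitedFrom overloads shown for the other messages (its hunk is not visible here), with a hypothetical CellSetStreamReader helper:

```java
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet;

public final class CellSetStreamReader {
  // Drain a stream of length-delimited CellSet messages. Under protobuf
  // 2.3.0, parseDelimitedFrom returns null at a clean end-of-stream, so
  // the loop ends without an EOF-flavored exception.
  public static int countMessages(InputStream in) throws IOException {
    int count = 0;
    while (CellSet.parseDelimitedFrom(in) != null) {
      count++;
    }
    return count;
  }
}
```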
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ColumnSchemaMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class ColumnSchemaMessage {
public static final class ColumnSchema extends
com.google.protobuf.GeneratedMessage {
// Use ColumnSchema.newBuilder() to construct.
private ColumnSchema() {}
private ColumnSchema() {
initFields();
}
private ColumnSchema(boolean noInit) {}

private static final ColumnSchema defaultInstance = new ColumnSchema();
private static final ColumnSchema defaultInstance;
public static ColumnSchema getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class ColumnSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable;

@@ -35,9 +38,12 @@ public final class ColumnSchemaMessage {
public static final class Attribute extends
com.google.protobuf.GeneratedMessage {
// Use Attribute.newBuilder() to construct.
private Attribute() {}
private Attribute() {
initFields();
}
private Attribute(boolean noInit) {}

private static final Attribute defaultInstance = new Attribute();
private static final Attribute defaultInstance;
public static Attribute getDefaultInstance() {
return defaultInstance;
}

@@ -51,7 +57,6 @@ public final class ColumnSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;

@@ -71,16 +76,17 @@ public final class ColumnSchemaMessage {
public boolean hasValue() { return hasValue; }
public java.lang.String getValue() { return value_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
if (!hasName) return false;
if (!hasValue) return false;
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -91,7 +97,6 @@ public final class ColumnSchemaMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -117,7 +122,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -128,7 +133,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -139,21 +144,30 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -162,43 +176,49 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDescriptor();

@@ -208,10 +228,12 @@ public final class ColumnSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -219,7 +241,7 @@ public final class ColumnSchemaMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -228,13 +250,13 @@ public final class ColumnSchemaMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute returnMe = result;
result = null;
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute)other);

@@ -256,18 +278,9 @@ public final class ColumnSchemaMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -316,7 +329,7 @@ public final class ColumnSchemaMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -337,14 +350,20 @@ public final class ColumnSchemaMessage {
}
public Builder clearValue() {
result.hasValue = false;
result.value_ = "";
result.value_ = getDefaultInstance().getValue();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor();
defaultInstance = new Attribute(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute)
}

// optional string name = 1;

@@ -387,7 +406,8 @@ public final class ColumnSchemaMessage {
public boolean hasCompression() { return hasCompression; }
public java.lang.String getCompression() { return compression_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
if (!element.isInitialized()) return false;

@@ -395,9 +415,9 @@ public final class ColumnSchemaMessage {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -417,7 +437,6 @@ public final class ColumnSchemaMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -455,7 +474,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -466,7 +485,7 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -477,21 +496,30 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -500,43 +528,49 @@ public final class ColumnSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDescriptor();

@@ -546,10 +580,12 @@ public final class ColumnSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -557,7 +593,7 @@ public final class ColumnSchemaMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -566,7 +602,8 @@ public final class ColumnSchemaMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.attrs_ != java.util.Collections.EMPTY_LIST) {
result.attrs_ =
java.util.Collections.unmodifiableList(result.attrs_);

@@ -576,7 +613,6 @@ public final class ColumnSchemaMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema)other);

@@ -610,18 +646,9 @@ public final class ColumnSchemaMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -684,7 +711,7 @@ public final class ColumnSchemaMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -792,14 +819,20 @@ public final class ColumnSchemaMessage {
}
public Builder clearCompression() {
result.hasCompression = false;
result.compression_ = "";
result.compression_ = getDefaultInstance().getCompression();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor();
defaultInstance = new ColumnSchema(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -820,7 +853,7 @@ public final class ColumnSchemaMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n\031ColumnSchemaMessage.proto\0223org.apache." +
"hadoop.hbase.stargate.protobuf.generated" +
"\"\331\001\n\014ColumnSchema\022\014\n\004name\030\001 \001(\t\022Z\n\005attrs" +

@@ -828,7 +861,8 @@ public final class ColumnSchemaMessage {
"e.protobuf.generated.ColumnSchema.Attrib" +
"ute\022\013\n\003ttl\030\003 \001(\005\022\023\n\013maxVersions\030\004 \001(\005\022\023\n" +
"\013compression\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030" +
"\001 \002(\t\022\r\n\005value\030\002 \002(\t";
"\001 \002(\t\022\r\n\005value\030\002 \002(\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -858,4 +892,8 @@ public final class ColumnSchemaMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}

public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}
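The regenerated descriptor string above still encodes the same ColumnSchema shape: optional name (1), repeated attrs (2), ttl (3), maxVersions (4), and compression (5), with a nested Attribute of required name (1) and value (2). A minimal construction sketch under those field definitions; only the clear* accessors are visible in this hunk, so the set*/add* builder methods are assumed from the standard generated API:

```java
import org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema;

public final class ColumnSchemaExample {
  // Build a ColumnSchema message matching the descriptor in this file.
  // Setter names follow the usual protobuf-generated pattern.
  public static ColumnSchema example() {
    return ColumnSchema.newBuilder()
        .setName("info")                    // optional string name = 1
        .addAttrs(ColumnSchema.Attribute.newBuilder()
            .setName("BLOCKCACHE")          // required string name = 1
            .setValue("true")               // required string value = 2
            .build())
        .setTtl(86400)                      // optional int32 ttl = 3
        .setMaxVersions(3)                  // optional int32 maxVersions = 4
        .setCompression("NONE")             // optional string compression = 5
        .build();
  }
}
```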
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ScannerMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class ScannerMessage {
public static final class Scanner extends
com.google.protobuf.GeneratedMessage {
// Use Scanner.newBuilder() to construct.
private Scanner() {}
private Scanner() {
initFields();
}
private Scanner(boolean noInit) {}

private static final Scanner defaultInstance = new Scanner();
private static final Scanner defaultInstance;
public static Scanner getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class ScannerMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable;

@@ -79,14 +82,29 @@ public final class ScannerMessage {
public boolean hasEndTime() { return hasEndTime; }
public long getEndTime() { return endTime_; }

@Override
// optional int32 maxVersions = 7;
public static final int MAXVERSIONS_FIELD_NUMBER = 7;
private boolean hasMaxVersions;
private int maxVersions_ = 0;
public boolean hasMaxVersions() { return hasMaxVersions; }
public int getMaxVersions() { return maxVersions_; }

// optional string filter = 8;
public static final int FILTER_FIELD_NUMBER = 8;
private boolean hasFilter;
private java.lang.String filter_ = "";
public boolean hasFilter() { return hasFilter; }
public java.lang.String getFilter() { return filter_; }

private void initFields() {
}
public final boolean isInitialized() {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasStartRow()) {
output.writeBytes(1, getStartRow());
}

@@ -105,11 +123,16 @@ public final class ScannerMessage {
if (hasEndTime()) {
output.writeInt64(6, getEndTime());
}
if (hasMaxVersions()) {
output.writeInt32(7, getMaxVersions());
}
if (hasFilter()) {
output.writeString(8, getFilter());
}
getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -144,6 +167,14 @@ public final class ScannerMessage {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(6, getEndTime());
}
if (hasMaxVersions()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(7, getMaxVersions());
}
if (hasFilter()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(8, getFilter());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;

@@ -156,7 +187,7 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -167,7 +198,7 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -178,21 +209,30 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -201,43 +241,49 @@ public final class ScannerMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDescriptor();

@@ -247,10 +293,12 @@ public final class ScannerMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -258,7 +306,7 @@ public final class ScannerMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -267,7 +315,8 @@ public final class ScannerMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.columns_ != java.util.Collections.EMPTY_LIST) {
result.columns_ =
java.util.Collections.unmodifiableList(result.columns_);

@@ -277,7 +326,6 @@ public final class ScannerMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner)other);

@@ -310,22 +358,19 @@ public final class ScannerMessage {
if (other.hasEndTime()) {
setEndTime(other.getEndTime());
}
if (other.hasMaxVersions()) {
setMaxVersions(other.getMaxVersions());
}
if (other.hasFilter()) {
setFilter(other.getFilter());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -368,6 +413,14 @@ public final class ScannerMessage {
setEndTime(input.readInt64());
break;
}
case 56: {
setMaxVersions(input.readInt32());
break;
}
case 66: {
setFilter(input.readString());
break;
}
}
}
}

@@ -390,7 +443,7 @@ public final class ScannerMessage {
}
public Builder clearStartRow() {
result.hasStartRow = false;
result.startRow_ = com.google.protobuf.ByteString.EMPTY;
result.startRow_ = getDefaultInstance().getStartRow();
return this;
}

@@ -411,7 +464,7 @@ public final class ScannerMessage {
}
public Builder clearEndRow() {
result.hasEndRow = false;
result.endRow_ = com.google.protobuf.ByteString.EMPTY;
result.endRow_ = getDefaultInstance().getEndRow();
return this;
}

@@ -508,11 +561,56 @@ public final class ScannerMessage {
result.endTime_ = 0L;
return this;
}

// optional int32 maxVersions = 7;
public boolean hasMaxVersions() {
return result.hasMaxVersions();
}
public int getMaxVersions() {
return result.getMaxVersions();
}
public Builder setMaxVersions(int value) {
result.hasMaxVersions = true;
result.maxVersions_ = value;
return this;
}
public Builder clearMaxVersions() {
result.hasMaxVersions = false;
result.maxVersions_ = 0;
return this;
}

// optional string filter = 8;
public boolean hasFilter() {
return result.hasFilter();
}
public java.lang.String getFilter() {
return result.getFilter();
}
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasFilter = true;
result.filter_ = value;
return this;
}
public Builder clearFilter() {
result.hasFilter = false;
result.filter_ = getDefaultInstance().getFilter();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Scanner)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.getDescriptor();
defaultInstance = new Scanner(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Scanner)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -528,12 +626,14 @@ public final class ScannerMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n\024ScannerMessage.proto\0223org.apache.hadoo" +
"p.hbase.stargate.protobuf.generated\"o\n\007S" +
"canner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014" +
"\022\017\n\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstar" +
"tTime\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003";
"p.hbase.stargate.protobuf.generated\"\224\001\n\007" +
"Scanner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(" +
"\014\022\017\n\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tsta" +
"rtTime\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVers" +
"ions\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -544,7 +644,7 @@ public final class ScannerMessage {
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor,
new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", },
new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", },
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.class,
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.Builder.class);
return null;

@@ -555,4 +655,8 @@ public final class ScannerMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}

public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}
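This ScannerMessage diff is where the new filter plumbing lands: Scanner gains maxVersions (optional int32 = 7) and filter (optional string = 8), wired through writeTo, getSerializedSize, the mergeFrom cases 56 and 66, the builder accessors, and the descriptor. A minimal client-side sketch populating the new fields; setBatch is assumed from the standard generated API for the pre-existing batch field, and the filter value is an illustrative placeholder for a serialized filter descriptor, whose exact JSON schema is defined by the model layer rather than by this message:

```java
import org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner;

public final class ScannerExample {
  // Build a Scanner message using the two fields added in this diff.
  public static Scanner example() {
    return Scanner.newBuilder()
        .setBatch(100)       // optional int32 batch = 4 (pre-existing)
        .setMaxVersions(1)   // optional int32 maxVersions = 7 (new)
        .setFilter("{\"type\":\"PrefixFilter\",\"value\":\"abc\"}") // optional string filter = 8 (new); placeholder JSON
        .build();
  }
}
```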
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: StorageClusterStatusMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class StorageClusterStatusMessage {
public static final class StorageClusterStatus extends
com.google.protobuf.GeneratedMessage {
// Use StorageClusterStatus.newBuilder() to construct.
private StorageClusterStatus() {}
private StorageClusterStatus() {
initFields();
}
private StorageClusterStatus(boolean noInit) {}

private static final StorageClusterStatus defaultInstance = new StorageClusterStatus();
private static final StorageClusterStatus defaultInstance;
public static StorageClusterStatus getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class StorageClusterStatusMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_fieldAccessorTable;

@@ -35,9 +38,12 @@ public final class StorageClusterStatusMessage {
public static final class Region extends
com.google.protobuf.GeneratedMessage {
// Use Region.newBuilder() to construct.
private Region() {}
private Region() {
initFields();
}
private Region(boolean noInit) {}

private static final Region defaultInstance = new Region();
private static final Region defaultInstance;
public static Region getDefaultInstance() {
return defaultInstance;
}

@@ -51,7 +57,6 @@ public final class StorageClusterStatusMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;

@@ -99,15 +104,16 @@ public final class StorageClusterStatusMessage {
public boolean hasStorefileIndexSizeMB() { return hasStorefileIndexSizeMB; }
public int getStorefileIndexSizeMB() { return storefileIndexSizeMB_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
if (!hasName) return false;
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeBytes(1, getName());
}

@@ -130,7 +136,6 @@ public final class StorageClusterStatusMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -172,7 +177,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -183,7 +188,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -194,21 +199,30 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -217,43 +231,49 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDescriptor();

@@ -263,10 +283,12 @@ public final class StorageClusterStatusMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -274,7 +296,7 @@ public final class StorageClusterStatusMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -283,13 +305,13 @@ public final class StorageClusterStatusMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region returnMe = result;
result = null;
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region)other);

@@ -323,18 +345,9 @@ public final class StorageClusterStatusMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -399,7 +412,7 @@ public final class StorageClusterStatusMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = com.google.protobuf.ByteString.EMPTY;
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -492,19 +505,28 @@ public final class StorageClusterStatusMessage {
result.storefileIndexSizeMB_ = 0;
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.getDescriptor();
defaultInstance = new Region(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region)
}

public static final class Node extends
com.google.protobuf.GeneratedMessage {
// Use Node.newBuilder() to construct.
private Node() {}
private Node() {
initFields();
}
private Node(boolean noInit) {}

private static final Node defaultInstance = new Node();
private static final Node defaultInstance;
public static Node getDefaultInstance() {
return defaultInstance;
}

@@ -518,7 +540,6 @@ public final class StorageClusterStatusMessage {
|
|||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_descriptor;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
|
||||
|
@ -571,7 +592,8 @@ public final class StorageClusterStatusMessage {
|
|||
return regions_.get(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
private void initFields() {
|
||||
}
|
||||
public final boolean isInitialized() {
|
||||
if (!hasName) return false;
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
|
@ -580,9 +602,9 @@ public final class StorageClusterStatusMessage {
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
if (hasName()) {
|
||||
output.writeString(1, getName());
|
||||
}
|
||||
|
@ -605,7 +627,6 @@ public final class StorageClusterStatusMessage {
|
|||
}
|
||||
|
||||
private int memoizedSerializedSize = -1;
|
||||
@Override
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSerializedSize;
|
||||
if (size != -1) return size;
|
||||
|
@ -647,7 +668,7 @@ public final class StorageClusterStatusMessage {
|
|||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistry extensionRegistry)
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
|
@ -658,7 +679,7 @@ public final class StorageClusterStatusMessage {
|
|||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistry extensionRegistry)
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
|
@ -669,21 +690,30 @@ public final class StorageClusterStatusMessage {
|
|||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistry extensionRegistry)
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeDelimitedFrom(input).buildParsed();
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistry extensionRegistry)
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
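// Note on the change above (annotation, not part of the generated file): with
// protobuf 2.3.0 the parseDelimitedFrom() variants return null once the stream
// holds no further message (mergeDelimitedFrom() reports false at end of
// stream) instead of returning an empty Node, so callers must null-check.
// A minimal read-loop sketch, where 'in' is a hypothetical
// java.io.InputStream of length-delimited records:
//
//   StorageClusterStatus.Node node;
//   while ((node = StorageClusterStatus.Node.parseDelimitedFrom(in)) != null) {
//     // process node
//   }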
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -692,43 +722,49 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDescriptor();

@@ -738,10 +774,12 @@ public final class StorageClusterStatusMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -749,7 +787,7 @@ public final class StorageClusterStatusMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -758,7 +796,8 @@ public final class StorageClusterStatusMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.regions_ != java.util.Collections.EMPTY_LIST) {
result.regions_ =
java.util.Collections.unmodifiableList(result.regions_);

@@ -768,7 +807,6 @@ public final class StorageClusterStatusMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node)other);

@@ -805,18 +843,9 @@ public final class StorageClusterStatusMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -883,7 +912,7 @@ public final class StorageClusterStatusMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -1009,11 +1038,17 @@ public final class StorageClusterStatusMessage {
result.regions_ = java.util.Collections.emptyList();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.getDescriptor();
defaultInstance = new Node(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node)
}

// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;

@@ -1061,7 +1096,8 @@ public final class StorageClusterStatusMessage {
public boolean hasAverageLoad() { return hasAverageLoad; }
public double getAverageLoad() { return averageLoad_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
if (!element.isInitialized()) return false;

@@ -1069,9 +1105,9 @@ public final class StorageClusterStatusMessage {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
output.writeMessage(1, element);
}

@@ -1091,7 +1127,6 @@ public final class StorageClusterStatusMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -1134,7 +1169,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -1145,7 +1180,7 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -1156,21 +1191,30 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -1179,43 +1223,49 @@ public final class StorageClusterStatusMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDescriptor();

@@ -1225,10 +1275,12 @@ public final class StorageClusterStatusMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -1236,7 +1288,7 @@ public final class StorageClusterStatusMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -1245,7 +1297,8 @@ public final class StorageClusterStatusMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.liveNodes_ != java.util.Collections.EMPTY_LIST) {
result.liveNodes_ =
java.util.Collections.unmodifiableList(result.liveNodes_);

@@ -1259,7 +1312,6 @@ public final class StorageClusterStatusMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus)other);

@@ -1296,18 +1348,9 @@ public final class StorageClusterStatusMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -1497,11 +1540,17 @@ public final class StorageClusterStatusMessage {
result.averageLoad_ = 0D;
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.getDescriptor();
defaultInstance = new StorageClusterStatus(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -1527,7 +1576,7 @@ public final class StorageClusterStatusMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n!StorageClusterStatusMessage.proto\0223org" +
".apache.hadoop.hbase.stargate.protobuf.g" +
"enerated\"\232\004\n\024StorageClusterStatus\022a\n\tliv" +

@@ -1537,13 +1586,14 @@ public final class StorageClusterStatusMessage {
"ns\030\003 \001(\005\022\020\n\010requests\030\004 \001(\005\022\023\n\013averageLoa" +
"d\030\005 \001(\001\032\211\001\n\006Region\022\014\n\004name\030\001 \002(\014\022\016\n\006stor" +
"es\030\002 \001(\005\022\022\n\nstorefiles\030\003 \001(\005\022\027\n\017storefil" +
"eSizeMB\030\004 \001(\005\022\026\n\016memstoreSizeMB\030\005 \001(\005\022\034\n" +
"eSizeMB\030\004 \001(\005\022\026\n\016memstoreSizeMB\030\005 \001(\005\022\034\n",
"\024storefileIndexSizeMB\030\006 \001(\005\032\307\001\n\004Node\022\014\n\004" +
"name\030\001 \002(\t\022\021\n\tstartCode\030\002 \001(\003\022\020\n\010request" +
"s\030\003 \001(\005\022\022\n\nheapSizeMB\030\004 \001(\005\022\025\n\rmaxHeapSi" +
"zeMB\030\005 \001(\005\022a\n\007regions\030\006 \003(\0132P.org.apache" +
".hadoop.hbase.stargate.protobuf.generate" +
"d.StorageClusterStatus.Region";
"d.StorageClusterStatus.Region"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -1581,4 +1631,8 @@ public final class StorageClusterStatusMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
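// Annotation (not generated code): descriptorData is now a String[] of chunks
// rather than a single concatenated literal, presumably so each constant stays
// under the class-file limit of 65535 bytes per string in the constant pool as
// descriptors grow.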

public static void internalForceInit() {}
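// Annotation (not generated code): internalForceInit() appears to be a no-op
// hook; the nested classes' static initializers call it so that loading any
// nested type forces this outer class (and its file descriptor) to be
// initialized first, before defaultInstance.initFields() runs.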

// @@protoc_insertion_point(outer_class_scope)
}

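The regenerated classes above keep the same wire format; only the builder plumbing and the parse entry points changed. A short, self-contained sketch of the resulting API (the class wrapper and the field values are illustrative, not part of this commit):

import org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;

public class StatusRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a status with one live node; name is the Node's only required field.
    StorageClusterStatus status = StorageClusterStatus.newBuilder()
        .addLiveNodes(StorageClusterStatus.Node.newBuilder()
            .setName("regionserver-1.example.com")
            .setRequests(42)
            .build())
        .setRegions(1)
        .setAverageLoad(1.0)
        .build();
    // Round-trip through the regenerated parseFrom(byte[]), which still throws
    // InvalidProtocolBufferException on malformed input.
    byte[] wire = status.toByteArray();
    StorageClusterStatus copy = StorageClusterStatus.parseFrom(wire);
    System.out.println(copy.getLiveNodesCount()); // prints 1
  }
}
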
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: TableInfoMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class TableInfoMessage {
public static final class TableInfo extends
com.google.protobuf.GeneratedMessage {
// Use TableInfo.newBuilder() to construct.
private TableInfo() {}
private TableInfo() {
initFields();
}
private TableInfo(boolean noInit) {}

private static final TableInfo defaultInstance = new TableInfo();
private static final TableInfo defaultInstance;
public static TableInfo getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class TableInfoMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_fieldAccessorTable;

@@ -35,9 +38,12 @@ public final class TableInfoMessage {
public static final class Region extends
com.google.protobuf.GeneratedMessage {
// Use Region.newBuilder() to construct.
private Region() {}
private Region() {
initFields();
}
private Region(boolean noInit) {}

private static final Region defaultInstance = new Region();
private static final Region defaultInstance;
public static Region getDefaultInstance() {
return defaultInstance;
}

@@ -51,7 +57,6 @@ public final class TableInfoMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_fieldAccessorTable;

@@ -92,15 +97,16 @@ public final class TableInfoMessage {
public boolean hasLocation() { return hasLocation; }
public java.lang.String getLocation() { return location_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
if (!hasName) return false;
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -120,7 +126,6 @@ public final class TableInfoMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -158,7 +163,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -169,7 +174,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -180,21 +185,30 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -203,43 +217,49 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.getDescriptor();

@@ -249,10 +269,12 @@ public final class TableInfoMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -260,7 +282,7 @@ public final class TableInfoMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -269,13 +291,13 @@ public final class TableInfoMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region returnMe = result;
result = null;
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region)other);

@@ -306,18 +328,9 @@ public final class TableInfoMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -378,7 +391,7 @@ public final class TableInfoMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -399,7 +412,7 @@ public final class TableInfoMessage {
}
public Builder clearStartKey() {
result.hasStartKey = false;
result.startKey_ = com.google.protobuf.ByteString.EMPTY;
result.startKey_ = getDefaultInstance().getStartKey();
return this;
}

@@ -420,7 +433,7 @@ public final class TableInfoMessage {
}
public Builder clearEndKey() {
result.hasEndKey = false;
result.endKey_ = com.google.protobuf.ByteString.EMPTY;
result.endKey_ = getDefaultInstance().getEndKey();
return this;
}

@@ -459,14 +472,20 @@ public final class TableInfoMessage {
}
public Builder clearLocation() {
result.hasLocation = false;
result.location_ = "";
result.location_ = getDefaultInstance().getLocation();
return this;
}
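// Annotation (not generated code): the regenerated clear*() methods above
// reset a field via getDefaultInstance() instead of repeating the literal
// default ("" or ByteString.EMPTY), so a [default = ...] declared in the
// .proto stays defined in one place; for the fields in this file the
// observable value is unchanged, e.g.
//   region.toBuilder().clearEndKey().getEndKey() == ByteString.EMPTY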

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.getDescriptor();
defaultInstance = new Region(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region)
}

// required string name = 1;

@@ -488,7 +507,8 @@ public final class TableInfoMessage {
return regions_.get(index);
}

@Override
private void initFields() {
}
public final boolean isInitialized() {
if (!hasName) return false;
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {

@@ -497,9 +517,9 @@ public final class TableInfoMessage {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -510,7 +530,6 @@ public final class TableInfoMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -536,7 +555,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -547,7 +566,7 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -558,21 +577,30 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -581,43 +609,49 @@ public final class TableInfoMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.getDescriptor();

@@ -627,10 +661,12 @@ public final class TableInfoMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -638,7 +674,7 @@ public final class TableInfoMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -647,7 +683,8 @@ public final class TableInfoMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.regions_ != java.util.Collections.EMPTY_LIST) {
result.regions_ =
java.util.Collections.unmodifiableList(result.regions_);

@@ -657,7 +694,6 @@ public final class TableInfoMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo)other);

@@ -682,18 +718,9 @@ public final class TableInfoMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -744,7 +771,7 @@ public final class TableInfoMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -798,11 +825,17 @@ public final class TableInfoMessage {
result.regions_ = java.util.Collections.emptyList();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.getDescriptor();
defaultInstance = new TableInfo(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -823,7 +856,7 @@ public final class TableInfoMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n\026TableInfoMessage.proto\0223org.apache.had" +
"oop.hbase.stargate.protobuf.generated\"\311\001" +
"\n\tTableInfo\022\014\n\004name\030\001 \002(\t\022V\n\007regions\030\002 \003" +

@@ -831,7 +864,8 @@ public final class TableInfoMessage {
"otobuf.generated.TableInfo.Region\032V\n\006Reg" +
"ion\022\014\n\004name\030\001 \002(\t\022\020\n\010startKey\030\002 \001(\014\022\016\n\006e" +
"ndKey\030\003 \001(\014\022\n\n\002id\030\004 \001(\003\022\020\n\010location\030\005 \001(" +
"\t";
"\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -861,4 +895,8 @@ public final class TableInfoMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}

public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}

@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: TableListMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class TableListMessage {
public static final class TableList extends
com.google.protobuf.GeneratedMessage {
// Use TableList.newBuilder() to construct.
private TableList() {}
private TableList() {
initFields();
}
private TableList(boolean noInit) {}

private static final TableList defaultInstance = new TableList();
private static final TableList defaultInstance;
public static TableList getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class TableListMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_fieldAccessorTable;

@@ -44,14 +47,15 @@ public final class TableListMessage {
return name_.get(index);
}

@Override
private void initFields() {
}
public final boolean isInitialized() {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (java.lang.String element : getNameList()) {
output.writeString(1, element);
}

@@ -59,7 +63,6 @@ public final class TableListMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -86,7 +89,7 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -97,7 +100,7 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -108,21 +111,30 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -131,43 +143,49 @@ public final class TableListMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList();
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder clear() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder clone() {
|
||||
return new Builder().mergeFrom(result);
|
||||
return create().mergeFrom(result);
|
||||
}
|
||||
|
||||
@Override
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.getDescriptor();
|
||||
|
@ -177,10 +195,12 @@ public final class TableListMessage {
|
|||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw new com.google.protobuf.UninitializedMessageException(
|
||||
result);
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
@ -188,7 +208,7 @@ public final class TableListMessage {
|
|||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw new com.google.protobuf.UninitializedMessageException(
|
||||
throw newUninitializedMessageException(
|
||||
result).asInvalidProtocolBufferException();
|
||||
}
|
||||
return buildPartial();
|
||||
|
@ -197,7 +217,8 @@ public final class TableListMessage {
|
|||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder."); }
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
if (result.name_ != java.util.Collections.EMPTY_LIST) {
|
||||
result.name_ =
|
||||
java.util.Collections.unmodifiableList(result.name_);
|
||||
|
@ -207,7 +228,6 @@ public final class TableListMessage {
|
|||
return returnMe;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList)other);
|
||||
|
@ -229,18 +249,9 @@ public final class TableListMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder mergeFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return mergeFrom(input,
|
||||
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder mergeFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistry extensionRegistry)
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
||||
com.google.protobuf.UnknownFieldSet.newBuilder(
|
||||
|
@ -307,11 +318,17 @@ public final class TableListMessage {
|
|||
result.name_ = java.util.Collections.emptyList();
|
||||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableList)
|
||||
}
|
||||
|
||||
static {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.getDescriptor();
|
||||
defaultInstance = new TableList(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableList)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
|
@ -327,10 +344,11 @@ public final class TableListMessage {
|
|||
private static com.google.protobuf.Descriptors.FileDescriptor
|
||||
descriptor;
|
||||
static {
|
||||
java.lang.String descriptorData =
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\026TableListMessage.proto\0223org.apache.had" +
|
||||
"oop.hbase.stargate.protobuf.generated\"\031\n" +
|
||||
"\tTableList\022\014\n\004name\030\001 \003(\t";
|
||||
"\tTableList\022\014\n\004name\030\001 \003(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
|
@ -352,4 +370,8 @@ public final class TableListMessage {
|
|||
new com.google.protobuf.Descriptors.FileDescriptor[] {
|
||||
}, assigner);
|
||||
}
|
||||
|
||||
public static void internalForceInit() {}
|
||||
|
||||
// @@protoc_insertion_point(outer_class_scope)
|
||||
}
|
||||
|
|
|
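The regenerated TableListMessage above tracks two protobuf 2.3.0 API changes that recur through the rest of this commit: parse methods now accept a com.google.protobuf.ExtensionRegistryLite, and parseDelimitedFrom returns null once the underlying stream is exhausted instead of yielding an empty message. A minimal usage sketch of the new end-of-stream contract (the input file name is hypothetical, not part of this change; IOException handling omitted):

// Sketch only: drain length-delimited TableList messages from a stream.
// With protobuf 2.3.0, parseDelimitedFrom returns null at end-of-stream.
java.io.InputStream in = new java.io.FileInputStream("tables.pb"); // hypothetical input
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList list;
while ((list = org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.parseDelimitedFrom(in)) != null) {
  for (java.lang.String name : list.getNameList()) {
    System.out.println(name); // each repeated "name" entry is a table name
  }
}
in.close();
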
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: TableSchemaMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class TableSchemaMessage {
public static final class TableSchema extends
com.google.protobuf.GeneratedMessage {
// Use TableSchema.newBuilder() to construct.
private TableSchema() {}
private TableSchema() {
initFields();
}
private TableSchema(boolean noInit) {}

private static final TableSchema defaultInstance = new TableSchema();
private static final TableSchema defaultInstance;
public static TableSchema getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class TableSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_fieldAccessorTable;

@@ -35,9 +38,12 @@ public final class TableSchemaMessage {
public static final class Attribute extends
com.google.protobuf.GeneratedMessage {
// Use Attribute.newBuilder() to construct.
private Attribute() {}
private Attribute() {
initFields();
}
private Attribute(boolean noInit) {}

private static final Attribute defaultInstance = new Attribute();
private static final Attribute defaultInstance;
public static Attribute getDefaultInstance() {
return defaultInstance;
}

@@ -51,7 +57,6 @@ public final class TableSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;

@@ -71,16 +76,17 @@ public final class TableSchemaMessage {
public boolean hasValue() { return hasValue; }
public java.lang.String getValue() { return value_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
if (!hasName) return false;
if (!hasValue) return false;
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -91,7 +97,6 @@ public final class TableSchemaMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -117,7 +122,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -128,7 +133,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -139,21 +144,30 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -162,43 +176,49 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDescriptor();

@@ -208,10 +228,12 @@ public final class TableSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -219,7 +241,7 @@ public final class TableSchemaMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -228,13 +250,13 @@ public final class TableSchemaMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute returnMe = result;
result = null;
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute)other);

@@ -256,18 +278,9 @@ public final class TableSchemaMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -316,7 +329,7 @@ public final class TableSchemaMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -337,14 +350,20 @@ public final class TableSchemaMessage {
}
public Builder clearValue() {
result.hasValue = false;
result.value_ = "";
result.value_ = getDefaultInstance().getValue();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.getDescriptor();
defaultInstance = new Attribute(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute)
}

// optional string name = 1;

@@ -392,7 +411,8 @@ public final class TableSchemaMessage {
public boolean hasReadOnly() { return hasReadOnly; }
public boolean getReadOnly() { return readOnly_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
if (!element.isInitialized()) return false;

@@ -403,9 +423,9 @@ public final class TableSchemaMessage {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasName()) {
output.writeString(1, getName());
}

@@ -425,7 +445,6 @@ public final class TableSchemaMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -463,7 +482,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -474,7 +493,7 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -485,21 +504,30 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -508,43 +536,49 @@ public final class TableSchemaMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.getDescriptor();

@@ -554,10 +588,12 @@ public final class TableSchemaMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -565,7 +601,7 @@ public final class TableSchemaMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -574,7 +610,8 @@ public final class TableSchemaMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
if (result.attrs_ != java.util.Collections.EMPTY_LIST) {
result.attrs_ =
java.util.Collections.unmodifiableList(result.attrs_);

@@ -588,7 +625,6 @@ public final class TableSchemaMessage {
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema)other);

@@ -625,18 +661,9 @@ public final class TableSchemaMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -701,7 +728,7 @@ public final class TableSchemaMessage {
}
public Builder clearName() {
result.hasName = false;
result.name_ = "";
result.name_ = getDefaultInstance().getName();
return this;
}

@@ -842,11 +869,17 @@ public final class TableSchemaMessage {
result.readOnly_ = false;
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.getDescriptor();
defaultInstance = new TableSchema(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -867,7 +900,7 @@ public final class TableSchemaMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n\030TableSchemaMessage.proto\0223org.apache.h" +
"adoop.hbase.stargate.protobuf.generated\032" +
"\031ColumnSchemaMessage.proto\"\230\002\n\013TableSche" +

@@ -877,7 +910,8 @@ public final class TableSchemaMessage {
" \003(\0132A.org.apache.hadoop.hbase.stargate." +
"protobuf.generated.ColumnSchema\022\020\n\010inMem" +
"ory\030\004 \001(\010\022\020\n\010readOnly\030\005 \001(\010\032(\n\tAttribute" +
"\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t";
"\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -908,4 +942,8 @@ public final class TableSchemaMessage {
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor(),
}, assigner);
}

public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}

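The TableSchemaMessage regeneration above follows the same pattern and also reworks the Builder: a private static create() factory replaces direct construction, toBuilder() is added, and the clear-methods now reset fields from getDefaultInstance(). A short sketch against the regenerated Attribute builder (the attribute name and value are illustrative only, not taken from this commit):

// Sketch only: build a TableSchema.Attribute through the new Builder.create()
// path (reached via newBuilder()) and round-trip it with the added toBuilder().
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute attr =
    org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder()
        .setName("COMPRESSION") // illustrative; name and value are required fields
        .setValue("NONE")       // illustrative
        .build();
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute copy =
    attr.toBuilder().build();
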
@@ -1,4 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: VersionMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;

@@ -10,9 +11,12 @@ public final class VersionMessage {
public static final class Version extends
com.google.protobuf.GeneratedMessage {
// Use Version.newBuilder() to construct.
private Version() {}
private Version() {
initFields();
}
private Version(boolean noInit) {}

private static final Version defaultInstance = new Version();
private static final Version defaultInstance;
public static Version getDefaultInstance() {
return defaultInstance;
}

@@ -26,7 +30,6 @@ public final class VersionMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_descriptor;
}

@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_fieldAccessorTable;

@@ -67,14 +70,15 @@ public final class VersionMessage {
public boolean hasJerseyVersion() { return hasJerseyVersion; }
public java.lang.String getJerseyVersion() { return jerseyVersion_; }

@Override
private void initFields() {
}
public final boolean isInitialized() {
return true;
}

@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasStargateVersion()) {
output.writeString(1, getStargateVersion());
}

@@ -94,7 +98,6 @@ public final class VersionMessage {
}

private int memoizedSerializedSize = -1;
@Override
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

@@ -132,7 +135,7 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -143,7 +146,7 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();

@@ -154,21 +157,30 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input).buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeDelimitedFrom(input, extensionRegistry)
.buildParsed();
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
com.google.protobuf.CodedInputStream input)

@@ -177,43 +189,49 @@ public final class VersionMessage {
}
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return new Builder(); }
public Builder newBuilderForType() { return new Builder(); }
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version prototype) {
return new Builder().mergeFrom(prototype);
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version result;

// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.newBuilder()
private Builder() {}

org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version result = new org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version();
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version();
return builder;
}

@Override
protected org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version internalGetResult() {
return result;
}

@Override
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version();
return this;
}

@Override
public Builder clone() {
return new Builder().mergeFrom(result);
return create().mergeFrom(result);
}

@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.getDescriptor();

@@ -223,10 +241,12 @@ public final class VersionMessage {
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.getDefaultInstance();
}

public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version build() {
if (result != null && !isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
result);
throw newUninitializedMessageException(result);
}
return buildPartial();
}

@@ -234,7 +254,7 @@ public final class VersionMessage {
private org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw new com.google.protobuf.UninitializedMessageException(
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();

@@ -243,13 +263,13 @@ public final class VersionMessage {
public org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder."); }
"build() has already been called on this Builder.");
}
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version returnMe = result;
result = null;
return returnMe;
}

@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version) {
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version)other);

@@ -280,18 +300,9 @@ public final class VersionMessage {
return this;
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return mergeFrom(input,
com.google.protobuf.ExtensionRegistry.getEmptyRegistry());
}

@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistry extensionRegistry)
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(

@@ -352,7 +363,7 @@ public final class VersionMessage {
}
public Builder clearStargateVersion() {
result.hasStargateVersion = false;
result.stargateVersion_ = "";
result.stargateVersion_ = getDefaultInstance().getStargateVersion();
return this;
}

@@ -373,7 +384,7 @@ public final class VersionMessage {
}
public Builder clearJvmVersion() {
result.hasJvmVersion = false;
result.jvmVersion_ = "";
result.jvmVersion_ = getDefaultInstance().getJvmVersion();
return this;
}

@@ -394,7 +405,7 @@ public final class VersionMessage {
}
public Builder clearOsVersion() {
result.hasOsVersion = false;
result.osVersion_ = "";
result.osVersion_ = getDefaultInstance().getOsVersion();
return this;
}

@@ -415,7 +426,7 @@ public final class VersionMessage {
}
public Builder clearServerVersion() {
result.hasServerVersion = false;
result.serverVersion_ = "";
result.serverVersion_ = getDefaultInstance().getServerVersion();
return this;
}

@@ -436,14 +447,20 @@ public final class VersionMessage {
}
public Builder clearJerseyVersion() {
result.hasJerseyVersion = false;
result.jerseyVersion_ = "";
result.jerseyVersion_ = getDefaultInstance().getJerseyVersion();
return this;
}

// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Version)
}

static {
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.getDescriptor();
defaultInstance = new Version(true);
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internalForceInit();
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Version)
}

private static com.google.protobuf.Descriptors.Descriptor

@@ -459,12 +476,13 @@ public final class VersionMessage {
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String descriptorData =
java.lang.String[] descriptorData = {
"\n\024VersionMessage.proto\0223org.apache.hadoo" +
"p.hbase.stargate.protobuf.generated\"w\n\007V" +
"ersion\022\027\n\017stargateVersion\030\001 \001(\t\022\022\n\njvmVe" +
"rsion\030\002 \001(\t\022\021\n\tosVersion\030\003 \001(\t\022\025\n\rserver" +
"Version\030\004 \001(\t\022\025\n\rjerseyVersion\030\005 \001(\t";
"Version\030\004 \001(\t\022\025\n\rjerseyVersion\030\005 \001(\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(

@@ -486,4 +504,8 @@ public final class VersionMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}

public static void internalForceInit() {}

// @@protoc_insertion_point(outer_class_scope)
}

@@ -25,4 +25,6 @@ message Scanner {
optional int32 batch = 4;
optional int64 startTime = 5;
optional int64 endTime = 6;
optional int32 maxVersions = 7;
optional string filter = 8;
}

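The new filter field above carries the JSON filter descriptor this commit introduces; server-side it is decoded into an org.apache.hadoop.hbase.filter.Filter by ScannerModel.buildFilter, wired in through ResultGenerator elsewhere in this commit. The descriptor below is a hedged illustration only, assuming a "type" discriminator plus filter-specific keys; the authoritative schema is whatever ScannerModel.buildFilter accepts:

// Sketch only: a hypothetical JSON filter descriptor for the new Scanner.filter
// field. Key names ("type", "value") are assumptions for illustration; consult
// ScannerModel.buildFilter for the real schema. buildFilter throws Exception.
String json = "{\"type\": \"PrefixFilter\", \"value\": \"testRowOne\"}";
Filter filter = ResultGenerator.buildFilter(json);
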
@ -0,0 +1,979 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.StringWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.bind.JAXBContext;
|
||||
import javax.xml.bind.JAXBException;
|
||||
import javax.xml.bind.Marshaller;
|
||||
import javax.xml.bind.Unmarshaller;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.client.Delete;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.client.Scan;
|
||||
import org.apache.hadoop.hbase.filter.BinaryComparator;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.filter.FilterList;
|
||||
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
|
||||
import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
|
||||
import org.apache.hadoop.hbase.filter.PageFilter;
|
||||
import org.apache.hadoop.hbase.filter.PrefixFilter;
|
||||
import org.apache.hadoop.hbase.filter.QualifierFilter;
|
||||
import org.apache.hadoop.hbase.filter.RegexStringComparator;
|
||||
import org.apache.hadoop.hbase.filter.RowFilter;
|
||||
import org.apache.hadoop.hbase.filter.SkipFilter;
|
||||
import org.apache.hadoop.hbase.filter.SubstringComparator;
|
||||
import org.apache.hadoop.hbase.filter.ValueFilter;
|
||||
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
|
||||
import org.apache.hadoop.hbase.filter.FilterList.Operator;
|
||||
import org.apache.hadoop.hbase.stargate.client.Client;
|
||||
import org.apache.hadoop.hbase.stargate.client.Cluster;
|
||||
import org.apache.hadoop.hbase.stargate.client.Response;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.RowModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.ScannerModel;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class TestScannersWithFilters extends MiniClusterTestBase {
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog(TestScannersWithFilters.class);
|
||||
|
||||
private Client client;
|
||||
private JAXBContext context;
|
||||
private Marshaller marshaller;
|
||||
private Unmarshaller unmarshaller;
|
||||
|
||||
private static final byte [][] ROWS_ONE = {
|
||||
Bytes.toBytes("testRowOne-0"), Bytes.toBytes("testRowOne-1"),
|
||||
Bytes.toBytes("testRowOne-2"), Bytes.toBytes("testRowOne-3")
|
||||
};
|
||||
|
||||
private static final byte [][] ROWS_TWO = {
|
||||
Bytes.toBytes("testRowTwo-0"), Bytes.toBytes("testRowTwo-1"),
|
||||
Bytes.toBytes("testRowTwo-2"), Bytes.toBytes("testRowTwo-3")
|
||||
};
|
||||
|
||||
private static final byte [][] FAMILIES = {
|
||||
Bytes.toBytes("testFamilyOne"), Bytes.toBytes("testFamilyTwo")
|
||||
};
|
||||
|
||||
private static final byte [][] QUALIFIERS_ONE = {
|
||||
Bytes.toBytes("testQualifierOne-0"), Bytes.toBytes("testQualifierOne-1"),
|
||||
Bytes.toBytes("testQualifierOne-2"), Bytes.toBytes("testQualifierOne-3")
|
||||
};
|
||||
|
||||
private static final byte [][] QUALIFIERS_TWO = {
|
||||
Bytes.toBytes("testQualifierTwo-0"), Bytes.toBytes("testQualifierTwo-1"),
|
||||
Bytes.toBytes("testQualifierTwo-2"), Bytes.toBytes("testQualifierTwo-3")
|
||||
};
|
||||
|
||||
private static final byte [][] VALUES = {
|
||||
Bytes.toBytes("testValueOne"), Bytes.toBytes("testValueTwo")
|
||||
};
|
||||
|
||||
private long numRows = ROWS_ONE.length + ROWS_TWO.length;
|
||||
private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
|
||||
|
||||
public TestScannersWithFilters() throws JAXBException {
|
||||
super();
|
||||
context = JAXBContext.newInstance(
|
||||
CellModel.class,
|
||||
CellSetModel.class,
|
||||
RowModel.class,
|
||||
ScannerModel.class);
|
||||
marshaller = context.createMarshaller();
|
||||
unmarshaller = context.createUnmarshaller();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
client = new Client(new Cluster().add("localhost", testServletPort));
|
||||
HBaseAdmin admin = new HBaseAdmin(conf);
|
||||
if (!admin.tableExists(getName())) {
|
||||
HTableDescriptor htd = new HTableDescriptor(getName());
|
||||
htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
|
||||
htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
|
||||
admin.createTable(htd);
|
||||
HTable table = new HTable(conf, getName());
|
||||
// Insert first half
|
||||
for(byte [] ROW : ROWS_ONE) {
|
||||
Put p = new Put(ROW);
|
||||
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
|
||||
p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
|
||||
}
|
||||
table.put(p);
|
||||
}
|
||||
for(byte [] ROW : ROWS_TWO) {
|
||||
Put p = new Put(ROW);
|
||||
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
|
||||
p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
|
||||
}
|
||||
table.put(p);
|
||||
}
|
||||
|
||||
// Insert second half (reverse families)
|
||||
for(byte [] ROW : ROWS_ONE) {
|
||||
Put p = new Put(ROW);
|
||||
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
|
||||
p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
|
||||
}
|
||||
table.put(p);
|
||||
}
|
||||
for(byte [] ROW : ROWS_TWO) {
|
||||
Put p = new Put(ROW);
|
||||
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
|
||||
p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
|
||||
}
|
||||
table.put(p);
|
||||
}
|
||||
|
||||
// Delete the second qualifier from all rows and families
|
||||
for(byte [] ROW : ROWS_ONE) {
|
||||
Delete d = new Delete(ROW);
|
||||
d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
|
||||
d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
|
||||
table.delete(d);
|
||||
}
|
||||
for(byte [] ROW : ROWS_TWO) {
|
||||
Delete d = new Delete(ROW);
|
||||
d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
|
||||
d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
|
||||
table.delete(d);
|
||||
}
|
||||
colsPerRow -= 2;
|
||||
|
||||
// Delete the second rows from both groups, one column at a time
|
||||
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
|
||||
Delete d = new Delete(ROWS_ONE[1]);
|
||||
d.deleteColumns(FAMILIES[0], QUALIFIER);
|
||||
d.deleteColumns(FAMILIES[1], QUALIFIER);
|
||||
table.delete(d);
|
||||
}
|
||||
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
|
||||
Delete d = new Delete(ROWS_TWO[1]);
|
||||
d.deleteColumns(FAMILIES[0], QUALIFIER);
|
||||
d.deleteColumns(FAMILIES[1], QUALIFIER);
|
||||
table.delete(d);
|
||||
}
|
||||
numRows -= 2;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void tearDown() throws Exception {
|
||||
client.shutdown();
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
private void verifyScan(Scan s, long expectedRows, long expectedKeys)
|
||||
throws Exception {
|
||||
ScannerModel model = ScannerModel.fromScan(s);
|
||||
model.setBatch(Integer.MAX_VALUE); // fetch it all at once
|
||||
StringWriter writer = new StringWriter();
|
||||
marshaller.marshal(model, writer);
|
||||
LOG.debug(writer.toString());
|
||||
byte[] body = Bytes.toBytes(writer.toString());
|
||||
Response response = client.put("/" + getName() + "/scanner", MIMETYPE_XML,
|
||||
body);
|
||||
assertEquals(response.getCode(), 201);
|
||||
String scannerURI = response.getLocation();
|
||||
assertNotNull(scannerURI);
|
||||
|
||||
// get a cell set
|
||||
response = client.get(scannerURI, MIMETYPE_XML);
|
||||
assertEquals(response.getCode(), 200);
|
||||
CellSetModel cells = (CellSetModel)
|
||||
unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody()));
|
||||
|
||||
int rows = cells.getRows().size();
|
||||
assertTrue("Scanned too many rows! Only expected " + expectedRows +
|
||||
" total but scanned " + rows, expectedRows == rows);
|
||||
for (RowModel row: cells.getRows()) {
|
||||
int count = row.getCells().size();
|
||||
assertEquals("Expected " + expectedKeys + " keys per row but " +
|
||||
"returned " + count, expectedKeys, count);
|
||||
}
|
||||
|
||||
// delete the scanner
|
||||
response = client.delete(scannerURI);
|
||||
assertEquals(response.getCode(), 200);
|
||||
}
|
||||
|
||||
  private void verifyScanFull(Scan s, KeyValue [] kvs) throws Exception {
    ScannerModel model = ScannerModel.fromScan(s);
    model.setBatch(Integer.MAX_VALUE); // fetch it all at once
    StringWriter writer = new StringWriter();
    marshaller.marshal(model, writer);
    LOG.debug(writer.toString());
    byte[] body = Bytes.toBytes(writer.toString());
    Response response = client.put("/" + getName() + "/scanner", MIMETYPE_XML,
      body);
    assertEquals(response.getCode(), 201);
    String scannerURI = response.getLocation();
    assertNotNull(scannerURI);

    // get a cell set
    response = client.get(scannerURI, MIMETYPE_XML);
    assertEquals(response.getCode(), 200);
    CellSetModel cellSet = (CellSetModel)
      unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody()));

    // delete the scanner
    response = client.delete(scannerURI);
    assertEquals(response.getCode(), 200);

    int row = 0;
    int idx = 0;
    Iterator<RowModel> i = cellSet.getRows().iterator();
    for (boolean done = true; done; row++) {
      done = i.hasNext();
      if (!done) break;
      RowModel rowModel = i.next();
      List<CellModel> cells = rowModel.getCells();
      if (cells.isEmpty()) break;
      assertTrue("Scanned too many keys! Only expected " + kvs.length +
        " total but already scanned " + (cells.size() + idx),
        kvs.length >= idx + cells.size());
      for (CellModel cell: cells) {
        assertTrue("Row mismatch",
          Bytes.equals(rowModel.getKey(), kvs[idx].getRow()));
        byte[][] split = KeyValue.parseColumn(cell.getColumn());
        assertTrue("Family mismatch",
          Bytes.equals(split[0], kvs[idx].getFamily()));
        assertTrue("Qualifier mismatch",
          Bytes.equals(split[1], kvs[idx].getQualifier()));
        assertTrue("Value mismatch",
          Bytes.equals(cell.getValue(), kvs[idx].getValue()));
        idx++;
      }
    }
    assertEquals("Expected " + kvs.length + " total keys but scanned " + idx,
      kvs.length, idx);
  }
  private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys)
      throws Exception {
    ScannerModel model = ScannerModel.fromScan(s);
    model.setBatch(Integer.MAX_VALUE); // fetch it all at once
    StringWriter writer = new StringWriter();
    marshaller.marshal(model, writer);
    LOG.debug(writer.toString());
    byte[] body = Bytes.toBytes(writer.toString());
    Response response = client.put("/" + getName() + "/scanner", MIMETYPE_XML,
      body);
    assertEquals(response.getCode(), 201);
    String scannerURI = response.getLocation();
    assertNotNull(scannerURI);

    // get a cell set
    response = client.get(scannerURI, MIMETYPE_XML);
    assertEquals(response.getCode(), 200);
    CellSetModel cellSet = (CellSetModel)
      unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody()));

    // delete the scanner
    response = client.delete(scannerURI);
    assertEquals(response.getCode(), 200);

    Iterator<RowModel> i = cellSet.getRows().iterator();
    int j = 0;
    for (boolean done = true; done; j++) {
      done = i.hasNext();
      if (!done) break;
      RowModel rowModel = i.next();
      List<CellModel> cells = rowModel.getCells();
      if (cells.isEmpty()) break;
      assertTrue("Scanned too many rows! Only expected " + expectedRows +
        " total but already scanned " + (j+1), expectedRows > j);
      assertEquals("Expected " + expectedKeys + " keys per row but " +
        "returned " + cells.size(), expectedKeys, cells.size());
    }
    assertEquals("Expected " + expectedRows + " rows but scanned " + j +
      " rows", expectedRows, j);
  }
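  // The three verify helpers above drive the same REST round trip (PUT the
  // marshalled ScannerModel to /<table>/scanner, GET a CellSet from the
  // returned scanner URI, DELETE the scanner) and differ only in how much
  // they assert: verifyScan() checks row and per-row key counts,
  // verifyScanFull() also checks the exact KeyValues and their ordering, and
  // verifyScanNoEarlyOut() asserts only the totals, which suits filters that
  // cannot terminate the scan early.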
  public void testNoFilter() throws Exception {
    // No filter
    long expectedRows = this.numRows;
    long expectedKeys = this.colsPerRow;

    // Both families
    Scan s = new Scan();
    verifyScan(s, expectedRows, expectedKeys);

    // One family
    s = new Scan();
    s.addFamily(FAMILIES[0]);
    verifyScan(s, expectedRows, expectedKeys/2);
  }
  public void testPrefixFilter() throws Exception {
    // Grab rows from group one (half of total)
    long expectedRows = this.numRows / 2;
    long expectedKeys = this.colsPerRow;
    Scan s = new Scan();
    s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
    verifyScan(s, expectedRows, expectedKeys);
  }
  public void testPageFilter() throws Exception {

    // KVs in first 6 rows
    KeyValue [] expectedKVs = {
      // testRowOne-0
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-2
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-3
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
    };

    // Grab all 6 rows
    long expectedRows = 6;
    long expectedKeys = this.colsPerRow;
    Scan s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, expectedKVs);

    // Grab first 4 rows (6 cols per row)
    expectedRows = 4;
    expectedKeys = this.colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 24));

    // Grab first 2 rows
    expectedRows = 2;
    expectedKeys = this.colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 12));

    // Grab first row
    expectedRows = 1;
    expectedKeys = this.colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
  }
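  // Note on testPageFilter() above: PageFilter is stateful (it counts the
  // rows it has already let through, and its reset() is a no-op, as the
  // PageFilter hunk below shows), so the test deliberately sets a fresh
  // instance before each verify call instead of reusing one filter across
  // scans.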
  public void testInclusiveStopFilter() throws Exception {

    // Grab rows from group one

    // If we just use start/stop row, we get total/2 - 1 rows
    long expectedRows = (this.numRows / 2) - 1;
    long expectedKeys = this.colsPerRow;
    Scan s = new Scan(Bytes.toBytes("testRowOne-0"),
      Bytes.toBytes("testRowOne-3"));
    verifyScan(s, expectedRows, expectedKeys);

    // Now use start row with inclusive stop filter
    expectedRows = this.numRows / 2;
    s = new Scan(Bytes.toBytes("testRowOne-0"));
    s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowOne-3")));
    verifyScan(s, expectedRows, expectedKeys);

    // Grab rows from group two

    // If we just use start/stop row, we get total/2 - 1 rows
    expectedRows = (this.numRows / 2) - 1;
    expectedKeys = this.colsPerRow;
    s = new Scan(Bytes.toBytes("testRowTwo-0"),
      Bytes.toBytes("testRowTwo-3"));
    verifyScan(s, expectedRows, expectedKeys);

    // Now use start row with inclusive stop filter
    expectedRows = this.numRows / 2;
    s = new Scan(Bytes.toBytes("testRowTwo-0"));
    s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowTwo-3")));
    verifyScan(s, expectedRows, expectedKeys);
  }
  public void testQualifierFilter() throws Exception {

    // Match two keys (one from each family) in half the rows
    long expectedRows = this.numRows / 2;
    long expectedKeys = 2;
    Filter f = new QualifierFilter(CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    Scan s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys less than same qualifier
    // Expect only two keys (one from each family) in half the rows
    expectedRows = this.numRows / 2;
    expectedKeys = 2;
    f = new QualifierFilter(CompareOp.LESS,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys less than or equal
    // Expect four keys (two from each family) in half the rows
    expectedRows = this.numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.LESS_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys not equal
    // Expect four keys (two from each family)
    // Only look in first group of rows
    expectedRows = this.numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys greater or equal
    // Expect four keys (two from each family)
    // Only look in first group of rows
    expectedRows = this.numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.GREATER_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys greater
    // Expect two keys (one from each family)
    // Only look in first group of rows
    expectedRows = this.numRows / 2;
    expectedKeys = 2;
    f = new QualifierFilter(CompareOp.GREATER,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys not equal to
    // Look across rows and fully validate the keys and ordering
    // Expect varied numbers of keys, 4 per row in group one, 6 per row in
    // group two
    f = new QualifierFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(QUALIFIERS_ONE[2]));
    s = new Scan();
    s.setFilter(f);

    KeyValue [] kvs = {
      // testRowOne-0
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-2
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-3
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    };
    verifyScanFull(s, kvs);

    // Test across rows and groups with a regex
    // Filter out "test*-2"
    // Expect 4 keys per row across both groups
    f = new QualifierFilter(CompareOp.NOT_EQUAL,
      new RegexStringComparator("test.+-2"));
    s = new Scan();
    s.setFilter(f);

    kvs = new KeyValue [] {
      // testRowOne-0
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-2
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-3
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    };
    verifyScanFull(s, kvs);
  }
  public void testRowFilter() throws Exception {

    // Match a single row, all keys
    long expectedRows = 1;
    long expectedKeys = this.colsPerRow;
    Filter f = new RowFilter(CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    Scan s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match two rows, one from each group, using a regex
    expectedRows = 2;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.EQUAL,
      new RegexStringComparator("testRow.+-2"));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match rows less than
    // Expect all keys in one row
    expectedRows = 1;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.LESS,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match rows less than or equal
    // Expect all keys in two rows
    expectedRows = 2;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.LESS_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match rows not equal
    // Expect all keys in all but one row
    expectedRows = this.numRows - 1;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys greater or equal
    // Expect all keys in all but one row
    expectedRows = this.numRows - 1;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.GREATER_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match keys greater
    // Expect all keys in all but two rows
    expectedRows = this.numRows - 2;
    expectedKeys = this.colsPerRow;
    f = new RowFilter(CompareOp.GREATER,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match rows not equal to testRowOne-2
    // Look across rows and fully validate the keys and ordering
    // Should see all keys in all rows but testRowOne-2
    f = new RowFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
    s = new Scan();
    s.setFilter(f);

    KeyValue [] kvs = {
      // testRowOne-0
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowOne-3
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    };
    verifyScanFull(s, kvs);

    // Test across rows and groups with a regex
    // Filter out everything that doesn't match "*-2"
    // Expect all keys in two rows
    f = new RowFilter(CompareOp.EQUAL,
      new RegexStringComparator(".+-2"));
    s = new Scan();
    s.setFilter(f);

    kvs = new KeyValue [] {
      // testRowOne-2
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
    };
    verifyScanFull(s, kvs);
  }
  public void testValueFilter() throws Exception {

    // Match group one rows
    long expectedRows = this.numRows / 2;
    long expectedKeys = this.colsPerRow;
    Filter f = new ValueFilter(CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    Scan s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match group two rows
    expectedRows = this.numRows / 2;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueTwo")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match all values using regex
    expectedRows = this.numRows;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.EQUAL,
      new RegexStringComparator("testValue((One)|(Two))"));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values less than
    // Expect group one rows
    expectedRows = this.numRows / 2;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.LESS,
      new BinaryComparator(Bytes.toBytes("testValueTwo")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values less than or equal
    // Expect all rows
    expectedRows = this.numRows;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.LESS_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueTwo")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values less than or equal
    // Expect group one rows
    expectedRows = this.numRows / 2;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.LESS_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values not equal
    // Expect half the rows
    expectedRows = this.numRows / 2;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values greater or equal
    // Expect all rows
    expectedRows = this.numRows;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.GREATER_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values greater
    // Expect half the rows
    expectedRows = this.numRows / 2;
    expectedKeys = this.colsPerRow;
    f = new ValueFilter(CompareOp.GREATER,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);

    // Match values not equal to testValueOne
    // Look across rows and fully validate the keys and ordering
    // Should see all keys in all group two rows
    f = new ValueFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testValueOne")));
    s = new Scan();
    s.setFilter(f);

    KeyValue [] kvs = {
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    };
    verifyScanFull(s, kvs);
  }
  public void testSkipFilter() throws Exception {

    // Skip any row containing the qualifier "testQualifierOne-2"
    // Should only get rows from second group, and all keys
    Filter f = new SkipFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
    Scan s = new Scan();
    s.setFilter(f);

    KeyValue [] kvs = {
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
    };
    verifyScanFull(s, kvs);
  }
  public void testFilterList() throws Exception {

    // Test getting a single row, single key using Row, Qualifier, and Value
    // regular expression and substring filters
    // Use must pass all
    List<Filter> filters = new ArrayList<Filter>();
    filters.add(new RowFilter(CompareOp.EQUAL,
      new RegexStringComparator(".+-2")));
    filters.add(new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator(".+-2")));
    filters.add(new ValueFilter(CompareOp.EQUAL,
      new SubstringComparator("One")));
    Filter f = new FilterList(Operator.MUST_PASS_ALL, filters);
    Scan s = new Scan();
    s.addFamily(FAMILIES[0]);
    s.setFilter(f);
    KeyValue [] kvs = {
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0])
    };
    verifyScanFull(s, kvs);

    // Test getting everything with a MUST_PASS_ONE filter including row, qf,
    // val, regular expression and substring filters
    filters.clear();
    filters.add(new RowFilter(CompareOp.EQUAL,
      new RegexStringComparator(".+Two.+")));
    filters.add(new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator(".+-2")));
    filters.add(new ValueFilter(CompareOp.EQUAL,
      new SubstringComparator("One")));
    f = new FilterList(Operator.MUST_PASS_ONE, filters);
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, this.numRows, this.colsPerRow);
  }
  public void testFirstKeyOnlyFilter() throws Exception {
    Scan s = new Scan();
    s.setFilter(new FirstKeyOnlyFilter());
    // Expected KVs, the first KV from each of the remaining 6 rows
    KeyValue [] kvs = {
      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1])
    };
    verifyScanFull(s, kvs);
  }

}
@ -20,46 +20,21 @@

package org.apache.hadoop.hbase.filter;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;

/**
 * A binary comparator which lexicographically compares against the specified
 * byte array using {@link Bytes#compareTo(byte[], byte[])}.
 */
public class BinaryComparator implements WritableByteArrayComparable {
public class BinaryComparator extends WritableByteArrayComparable {

  private byte [] value;
  /** Nullary constructor for Writable, do not use */
  public BinaryComparator() { }

  /**
   * Writable constructor, do not use.
   * Constructor
   * @param value
   */
  public BinaryComparator() {
  public BinaryComparator(byte[] value) {
    super(value);
  }

  /**
   * Constructor.
   * @param value the value to compare against
   */
  public BinaryComparator(byte [] value) {
    this.value = value;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    value = Bytes.readByteArray(in);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    Bytes.writeByteArray(out, value);
  }

  @Override
  public int compareTo(byte [] value) {
    return Bytes.compareTo(this.value, value);
  }
}
@ -22,45 +22,28 @@ package org.apache.hadoop.hbase.filter;

import org.apache.hadoop.hbase.util.Bytes;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * A comparator which compares against a specified byte array, but only compares
 * up to the length of this byte array. For the rest it is similar to
 * {@link BinaryComparator}.
 */
public class BinaryPrefixComparator implements WritableByteArrayComparable {
  private byte [] value;
public class BinaryPrefixComparator extends WritableByteArrayComparable {

  /** Nullary constructor for Writable, do not use */
  public BinaryPrefixComparator() { }

  /**
   * Writable constructor, do not use.
   * Constructor
   * @param value
   */
  public BinaryPrefixComparator() {
  }

  /**
   * Constructor.
   * @param value the value to compare against
   */
  public BinaryPrefixComparator(byte [] value) {
    this.value = value;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    value = Bytes.readByteArray(in);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    Bytes.writeByteArray(out, value);
  public BinaryPrefixComparator(byte[] value) {
    super(value);
  }

  @Override
  public int compareTo(byte [] value) {
    return Bytes.compareTo(this.value, 0, this.value.length, value, 0, this.value.length);
    return Bytes.compareTo(this.value, 0, this.value.length, value, 0,
      this.value.length);
  }

}
@ -28,6 +28,10 @@ public class ColumnCountGetFilter implements Filter {
    this.limit = n;
  }

  public int getLimit() {
    return limit;
  }

  public boolean filterAllRemaining() {
    return this.count > this.limit;
  }
@ -81,6 +81,20 @@ public abstract class CompareFilter implements Filter {
    this.comparator = comparator;
  }

  /**
   * @return operator
   */
  public CompareOp getOperator() {
    return compareOp;
  }

  /**
   * @return the comparator
   */
  public WritableByteArrayComparable getComparator() {
    return comparator;
  }

  public void reset() {
  }
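// The read-only getters added in this hunk and the surrounding ones
// (getLimit, getStopRowKey, getPageSize, getPrefix, getOperator,
// getComparator, getFilter, getFamily, getQualifier) expose each filter's
// configuration; presumably this is what allows a configured filter to be
// externalized as a descriptor and rebuilt from one.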
@ -45,6 +45,10 @@ public class InclusiveStopFilter implements Filter {
    this.stopRowKey = stopRowKey;
  }

  public byte[] getStopRowKey() {
    return this.stopRowKey;
  }

  public void reset() {
    // noop, no state
  }
@ -57,6 +57,10 @@ public class PageFilter implements Filter {
    this.pageSize = pageSize;
  }

  public long getPageSize() {
    return pageSize;
  }

  public void reset() {
    // noop
  }
@ -42,6 +42,10 @@ public class PrefixFilter implements Filter {
    super();
  }

  public byte[] getPrefix() {
    return prefix;
  }

  public void reset() {
    // Noop
  }
@ -48,33 +48,39 @@ import org.apache.hadoop.hbase.util.Bytes;
 *     "{3}[\\d]{1,3})?)(\\/[0-9]+)?"));
 * </pre>
 */
public class RegexStringComparator implements WritableByteArrayComparable {
public class RegexStringComparator extends WritableByteArrayComparable {

  private Pattern pattern;

  /** Nullary constructor for Writable */
  public RegexStringComparator() {
    super();
  }
  /** Nullary constructor for Writable, do not use */
  public RegexStringComparator() { }

  /**
   * Constructor
   * @param expr a valid regular expression
   */
  public RegexStringComparator(String expr) {
    super(Bytes.toBytes(expr));
    this.pattern = Pattern.compile(expr);
  }

  @Override
  public int compareTo(byte[] value) {
    // Use find() for subsequence match instead of matches() (full sequence
    // match) to adhere to the principle of least surprise.
    return pattern.matcher(Bytes.toString(value)).find() ? 0 : 1;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    this.pattern = Pattern.compile(in.readUTF());
    String expr = in.readUTF();
    this.value = Bytes.toBytes(expr);
    this.pattern = Pattern.compile(expr);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeUTF(pattern.toString());
  }

}
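// Note on the readFields() change above: deserialization now repopulates the
// inherited value field (the UTF-8 bytes of the expression) in addition to
// compiling the Pattern, so getValue() on a deserialized comparator returns
// the expression bytes rather than null.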
@ -116,6 +116,34 @@ public class SingleColumnValueFilter implements Filter {
    this.comparator = comparator;
  }

  /**
   * @return operator
   */
  public CompareOp getOperator() {
    return compareOp;
  }

  /**
   * @return the comparator
   */
  public WritableByteArrayComparable getComparator() {
    return comparator;
  }

  /**
   * @return the family
   */
  public byte[] getFamily() {
    return columnFamily;
  }

  /**
   * @return the qualifier
   */
  public byte[] getQualifier() {
    return columnQualifier;
  }

  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
    // We don't filter on the row key... we filter later on column value so
    // always return false.
@ -56,6 +56,10 @@ public class SkipFilter implements Filter {
    this.filter = filter;
  }

  public Filter getFilter() {
    return filter;
  }

  public void reset() {
    filter.reset();
    filterRow = false;
@ -40,12 +40,13 @@ import org.apache.hadoop.hbase.util.Bytes;
 *     new SubstringComparator("substr"));
 * </pre>
 */
public class SubstringComparator implements WritableByteArrayComparable {
public class SubstringComparator extends WritableByteArrayComparable {

  private String substr;

  /** Nullary constructor for Writable */
  /** Nullary constructor for Writable, do not use */
  public SubstringComparator() {
    super();
  }

  /**
@ -53,17 +54,28 @@ public class SubstringComparator implements WritableByteArrayComparable {
   * @param substr the substring
   */
  public SubstringComparator(String substr) {
    super(Bytes.toBytes(substr.toLowerCase()));
    this.substr = substr.toLowerCase();
  }

  @Override
  public byte[] getValue() {
    return Bytes.toBytes(substr);
  }

  @Override
  public int compareTo(byte[] value) {
    return Bytes.toString(value).toLowerCase().contains(substr) ? 0 : 1;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    substr = in.readUTF();
    String substr = in.readUTF();
    this.value = Bytes.toBytes(substr);
    this.substr = substr;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeUTF(substr);
  }
@ -45,6 +45,10 @@ public class WhileMatchFilter implements Filter {
    this.filter = filter;
  }

  public Filter getFilter() {
    return filter;
  }

  public void reset() {
    this.filter.reset();
  }
@ -19,9 +19,48 @@
 */
package org.apache.hadoop.hbase.filter;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;

/** Interface for both Comparable<byte []> and Writable. */
public interface WritableByteArrayComparable extends Writable, Comparable<byte[]> {
  // Not methods, just tie the two interfaces together.
/** Base class, combines Comparable<byte []> and Writable. */
public abstract class WritableByteArrayComparable implements Writable, Comparable<byte[]> {

  byte[] value;

  /**
   * Nullary constructor, for Writable
   */
  public WritableByteArrayComparable() { }

  /**
   * Constructor.
   * @param value the value to compare against
   */
  public WritableByteArrayComparable(byte [] value) {
    this.value = value;
  }

  public byte[] getValue() {
    return value;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    value = Bytes.readByteArray(in);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    Bytes.writeByteArray(out, value);
  }

  @Override
  public int compareTo(byte [] value) {
    return Bytes.compareTo(this.value, value);
  }

}
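// A minimal sketch (illustration only, not part of this commit) of what the
// new abstract base class buys a custom comparator: the value field,
// getValue(), and the Writable plumbing are all inherited, so a subclass only
// supplies constructors and compareTo(). The class name SuffixComparator is
// hypothetical; it assumes the same package and imports as the code above.
public class SuffixComparator extends WritableByteArrayComparable {

  /** Nullary constructor for Writable, do not use */
  public SuffixComparator() { }

  /**
   * Constructor
   * @param value the suffix to match against
   */
  public SuffixComparator(byte[] value) {
    super(value);
  }

  @Override
  public int compareTo(byte[] actual) {
    // Return 0 (match) only when 'actual' ends with the configured suffix.
    if (actual.length < value.length) {
      return 1;
    }
    return Bytes.compareTo(value, 0, value.length,
      actual, actual.length - value.length, value.length);
  }
}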