HBASE-2542 Fold stargate contrib into core
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@945816 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
ea86b21299
commit
b560475837
|
@ -21,6 +21,7 @@ Release 0.21.0 - Unreleased
|
|||
HBASE-2294 Enumerate ACID properties of HBase in a well defined spec
|
||||
(Todd Lipcon via Stack)
|
||||
HBASE-2541 Remove transactional contrib (Clint Morgan via Stack)
|
||||
HBASE-2542 Fold stargate contrib into core
|
||||
|
||||
BUG FIXES
|
||||
HBASE-1791 Timeout in IndexRecordWriter (Bradford Stephens via Andrew
|
||||
|
|
|
@ -17,7 +17,6 @@
|
|||
<module>mdc_replication</module>
|
||||
St.Ack Fri Mar 19 13:20:15 PDT 2010
|
||||
-->
|
||||
<module>stargate</module>
|
||||
</modules>
|
||||
|
||||
</project>
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
This product includes software developed by The Apache Software
|
||||
Foundation (http://www.apache.org/).
|
||||
|
||||
In addition, this product includes software developed by:
|
||||
|
||||
The asm jar is copyright INRIA, France Telecom and has the following
|
||||
license: http://asm.ow2.org/license.html
|
||||
|
||||
The jaxb-impl jar is copyright Sun Microsystems Inc. and
|
||||
uses the CDDL 1.0 license:
|
||||
https://glassfish.dev.java.net/public/CDDLv1.0.html
|
||||
|
||||
The jersey jars, jsr311, and persistence-api are copyright
|
||||
Sun Microsystems Inc. and licensed under CDDL 1.0:
|
||||
https://jersey.dev.java.net/CDDL+GPL.html
|
||||
|
||||
Proto-bufs is copyright Google, Inc. and bsd licensed:
|
||||
http://code.google.com/p/protobuf/
|
|
@ -1,151 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
<artifactId>hbase-contrib-stargate</artifactId>
|
||||
<version>0.21.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hbase-contrib-stargate-core</artifactId>
|
||||
<packaging>jar</packaging>
|
||||
<name>HBase Contrib - Stargate Core</name>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>test-jar</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<archive>
|
||||
<manifest>
|
||||
<mainClass>org/apache/hadoop/hbase/stargate/PerformanceEvaluation</mainClass>
|
||||
</manifest>
|
||||
</archive>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
<properties>
|
||||
<commons-httpclient.version>3.1</commons-httpclient.version>
|
||||
<jaxb-api.version>2.1</jaxb-api.version>
|
||||
<jersey.version>1.1.5.1</jersey.version>
|
||||
<jsr311.version>1.1.1</jsr311.version>
|
||||
<protobuf.version>2.3.0</protobuf.version>
|
||||
<stax-api>1.0.1</stax-api>
|
||||
<hsqldb.version>1.8.0.10</hsqldb.version>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-core</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-json</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-server</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
<version>${commons-httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
<version>${jaxb-api.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.ws.rs</groupId>
|
||||
<artifactId>jsr311-api</artifactId>
|
||||
<version>${jsr311.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!--
|
||||
It is unfortunately neccessary to keep hbase-core before hadoop-core.
|
||||
For an explanation see the pom.xml from hbase-core.
|
||||
-->
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>hbase-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>zookeeper</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
<version>${jetty.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>servlet-api-2.5</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>stax</groupId>
|
||||
<artifactId>stax-api</artifactId>
|
||||
<version>1.0.1</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
<groupId>hsqldb</groupId>
|
||||
<artifactId>hsqldb</artifactId>
|
||||
<version>${hsqldb.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>hbase-core</artifactId>
|
||||
<classifier>tests</classifier>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-test</artifactId>
|
||||
<version>${hadoop.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
|
@ -1,373 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import javax.xml.bind.JAXBException;
|
||||
import javax.xml.bind.annotation.XmlAttribute;
|
||||
import javax.xml.bind.annotation.XmlElement;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Chore;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HTableInterface;
|
||||
import org.apache.hadoop.hbase.client.HTablePool;
|
||||
import org.apache.hadoop.hbase.stargate.auth.Authenticator;
|
||||
import org.apache.hadoop.hbase.stargate.auth.HBCAuthenticator;
|
||||
import org.apache.hadoop.hbase.stargate.auth.HTableAuthenticator;
|
||||
import org.apache.hadoop.hbase.stargate.auth.JDBCAuthenticator;
|
||||
import org.apache.hadoop.hbase.stargate.auth.ZooKeeperAuthenticator;
|
||||
import org.apache.hadoop.hbase.stargate.metrics.StargateMetrics;
|
||||
import org.apache.hadoop.hbase.stargate.util.HTableTokenBucket;
|
||||
import org.apache.hadoop.hbase.stargate.util.SoftUserData;
|
||||
import org.apache.hadoop.hbase.stargate.util.UserData;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.Pair;
|
||||
import org.apache.hadoop.hbase.util.Threads;
|
||||
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.zookeeper.CreateMode;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
import org.apache.zookeeper.WatchedEvent;
|
||||
import org.apache.zookeeper.Watcher;
|
||||
import org.apache.zookeeper.ZooKeeper;
|
||||
import org.apache.zookeeper.Watcher.Event.EventType;
|
||||
import org.apache.zookeeper.Watcher.Event.KeeperState;
|
||||
import org.apache.zookeeper.ZooDefs.Ids;
|
||||
import org.apache.zookeeper.data.Stat;
|
||||
|
||||
import com.sun.jersey.api.json.JSONJAXBContext;
|
||||
import com.sun.jersey.api.json.JSONMarshaller;
|
||||
import com.sun.jersey.server.impl.container.servlet.ServletAdaptor;
|
||||
|
||||
/**
|
||||
* Singleton class encapsulating global REST servlet state and functions.
|
||||
*/
|
||||
public class RESTServlet extends ServletAdaptor
|
||||
implements Constants, Watcher {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(RESTServlet.class);
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private static RESTServlet instance;
|
||||
|
||||
@XmlRootElement(name="status")
|
||||
static class StatusModel {
|
||||
@XmlAttribute long requests;
|
||||
@XmlElement List<String> connectors = new ArrayList<String>();
|
||||
public void addConnector(String host, int port) {
|
||||
connectors.add(host + ":" + Integer.toString(port));
|
||||
}
|
||||
}
|
||||
|
||||
class StatusReporter extends Chore {
|
||||
|
||||
final JSONJAXBContext context;
|
||||
final JSONMarshaller marshaller;
|
||||
|
||||
public StatusReporter(int period, AtomicBoolean stopping)
|
||||
throws IOException {
|
||||
super(period, stopping);
|
||||
try {
|
||||
context = new JSONJAXBContext(StatusModel.class);
|
||||
marshaller = context.createJSONMarshaller();
|
||||
} catch (JAXBException e) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void chore() {
|
||||
if (wrapper != null) try {
|
||||
StatusModel model = new StatusModel();
|
||||
model.requests = (long)metrics.getRequests();
|
||||
for (Pair<String,Integer> e: connectors) {
|
||||
model.addConnector(e.getFirst(), e.getSecond());
|
||||
}
|
||||
ByteArrayOutputStream os = new ByteArrayOutputStream();
|
||||
marshaller.marshallToJSON(model, os);
|
||||
ensureExists(znode, CreateMode.EPHEMERAL, os.toByteArray());
|
||||
} catch (Exception e) {
|
||||
LOG.error(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final String znode = INSTANCE_ZNODE_ROOT + "/" + System.currentTimeMillis();
|
||||
transient final Configuration conf;
|
||||
transient final HTablePool pool;
|
||||
transient volatile ZooKeeperWrapper wrapper;
|
||||
transient Chore statusReporter;
|
||||
transient Authenticator authenticator;
|
||||
AtomicBoolean stopping = new AtomicBoolean(false);
|
||||
boolean multiuser;
|
||||
Map<String,Integer> maxAgeMap =
|
||||
Collections.synchronizedMap(new HashMap<String,Integer>());
|
||||
List<Pair<String,Integer>> connectors =
|
||||
Collections.synchronizedList(new ArrayList<Pair<String,Integer>>());
|
||||
StargateMetrics metrics = new StargateMetrics();
|
||||
|
||||
/**
|
||||
* @return the RESTServlet singleton instance
|
||||
* @throws IOException
|
||||
*/
|
||||
public synchronized static RESTServlet getInstance() throws IOException {
|
||||
if (instance == null) {
|
||||
instance = new RESTServlet();
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
private boolean ensureExists(final String znode, final CreateMode mode,
|
||||
final byte[] data) {
|
||||
try {
|
||||
ZooKeeper zk = wrapper.getZooKeeper();
|
||||
Stat stat = zk.exists(znode, false);
|
||||
if (stat != null) {
|
||||
zk.setData(znode, data, -1);
|
||||
return true;
|
||||
}
|
||||
zk.create(znode, data, Ids.OPEN_ACL_UNSAFE, mode);
|
||||
LOG.info("Created ZNode " + znode);
|
||||
return true;
|
||||
} catch (KeeperException.NodeExistsException e) {
|
||||
return true; // ok, move on.
|
||||
} catch (KeeperException.NoNodeException e) {
|
||||
return ensureParentExists(znode, CreateMode.PERSISTENT, new byte[]{}) &&
|
||||
ensureExists(znode, mode, data);
|
||||
} catch (KeeperException e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
} catch (InterruptedException e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean ensureParentExists(final String znode, final CreateMode mode,
|
||||
final byte[] data) {
|
||||
int index = znode.lastIndexOf('/');
|
||||
if (index <= 0) { // Parent is root, which always exists.
|
||||
return true;
|
||||
}
|
||||
return ensureExists(znode.substring(0, index), mode, data);
|
||||
}
|
||||
|
||||
ZooKeeperWrapper initZooKeeperWrapper() throws IOException {
|
||||
return new ZooKeeperWrapper(conf, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public RESTServlet() throws IOException {
|
||||
this.conf = HBaseConfiguration.create();
|
||||
this.pool = new HTablePool(conf, 10);
|
||||
this.wrapper = initZooKeeperWrapper();
|
||||
this.statusReporter = new StatusReporter(
|
||||
conf.getInt(STATUS_REPORT_PERIOD_KEY, 1000 * 30), stopping);
|
||||
Threads.setDaemonThreadRunning(statusReporter, "Stargate.statusReporter");
|
||||
this.multiuser = conf.getBoolean("stargate.multiuser", false);
|
||||
if (this.multiuser) {
|
||||
LOG.info("multiuser mode enabled");
|
||||
getAuthenticator();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(WatchedEvent event) {
|
||||
LOG.debug(("ZooKeeper.Watcher event " + event.getType() + " with path " +
|
||||
event.getPath()));
|
||||
// handle disconnection (or manual delete to test disconnection scenario)
|
||||
if (event.getState() == KeeperState.Expired ||
|
||||
(event.getType().equals(EventType.NodeDeleted) &&
|
||||
event.getPath().equals(znode))) {
|
||||
wrapper.close();
|
||||
wrapper = null;
|
||||
while (!stopping.get()) try {
|
||||
wrapper = initZooKeeperWrapper();
|
||||
break;
|
||||
} catch (IOException e) {
|
||||
LOG.error(StringUtils.stringifyException(e));
|
||||
try {
|
||||
Thread.sleep(10 * 1000);
|
||||
} catch (InterruptedException ex) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
HTablePool getTablePool() {
|
||||
return pool;
|
||||
}
|
||||
|
||||
ZooKeeperWrapper getZooKeeperWrapper() {
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
Configuration getConfiguration() {
|
||||
return conf;
|
||||
}
|
||||
|
||||
StargateMetrics getMetrics() {
|
||||
return metrics;
|
||||
}
|
||||
|
||||
void addConnectorAddress(String host, int port) {
|
||||
connectors.add(new Pair<String,Integer>(host, port));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param tableName the table name
|
||||
* @return the maximum cache age suitable for use with this table, in
|
||||
* seconds
|
||||
* @throws IOException
|
||||
*/
|
||||
public int getMaxAge(String tableName) throws IOException {
|
||||
Integer i = maxAgeMap.get(tableName);
|
||||
if (i != null) {
|
||||
return i.intValue();
|
||||
}
|
||||
HTableInterface table = pool.getTable(tableName);
|
||||
try {
|
||||
int maxAge = DEFAULT_MAX_AGE;
|
||||
for (HColumnDescriptor family :
|
||||
table.getTableDescriptor().getFamilies()) {
|
||||
int ttl = family.getTimeToLive();
|
||||
if (ttl < 0) {
|
||||
continue;
|
||||
}
|
||||
if (ttl < maxAge) {
|
||||
maxAge = ttl;
|
||||
}
|
||||
}
|
||||
maxAgeMap.put(tableName, maxAge);
|
||||
return maxAge;
|
||||
} finally {
|
||||
pool.putTable(table);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Signal that a previously calculated maximum cache age has been
|
||||
* invalidated by a schema change.
|
||||
* @param tableName the table name
|
||||
*/
|
||||
public void invalidateMaxAge(String tableName) {
|
||||
maxAgeMap.remove(tableName);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the servlet should operate in multiuser mode
|
||||
*/
|
||||
public boolean isMultiUser() {
|
||||
return multiuser;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param multiuser true if the servlet should operate in multiuser mode
|
||||
*/
|
||||
public void setMultiUser(boolean multiuser) {
|
||||
this.multiuser = multiuser;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return an authenticator
|
||||
*/
|
||||
public Authenticator getAuthenticator() {
|
||||
if (authenticator == null) {
|
||||
String className = conf.get(AUTHENTICATOR_KEY,
|
||||
HBCAuthenticator.class.getCanonicalName());
|
||||
try {
|
||||
Class<?> c = getClass().getClassLoader().loadClass(className);
|
||||
if (className.endsWith(HBCAuthenticator.class.getName()) ||
|
||||
className.endsWith(HTableAuthenticator.class.getName()) ||
|
||||
className.endsWith(JDBCAuthenticator.class.getName())) {
|
||||
Constructor<?> cons = c.getConstructor(HBaseConfiguration.class);
|
||||
authenticator = (Authenticator)
|
||||
cons.newInstance(new Object[] { conf });
|
||||
} else if (className.endsWith(ZooKeeperAuthenticator.class.getName())) {
|
||||
Constructor<?> cons = c.getConstructor(HBaseConfiguration.class,
|
||||
ZooKeeperWrapper.class);
|
||||
authenticator = (Authenticator)
|
||||
cons.newInstance(new Object[] { conf, wrapper });
|
||||
} else {
|
||||
authenticator = (Authenticator)c.newInstance();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
LOG.error(StringUtils.stringifyException(e));
|
||||
}
|
||||
if (authenticator == null) {
|
||||
authenticator = new HBCAuthenticator(conf);
|
||||
}
|
||||
LOG.info("using authenticator " + authenticator);
|
||||
}
|
||||
return authenticator;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param authenticator
|
||||
*/
|
||||
public void setAuthenticator(Authenticator authenticator) {
|
||||
this.authenticator = authenticator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the user has exceeded their request token limit within the
|
||||
* current interval
|
||||
* @param user the user
|
||||
* @param want the number of tokens desired
|
||||
* @throws IOException
|
||||
*/
|
||||
public boolean userRequestLimit(final User user, int want)
|
||||
throws IOException {
|
||||
if (multiuser) {
|
||||
UserData ud = SoftUserData.get(user);
|
||||
HTableTokenBucket tb = (HTableTokenBucket) ud.get(UserData.TOKENBUCKET);
|
||||
if (tb == null) {
|
||||
tb = new HTableTokenBucket(conf, Bytes.toBytes(user.getToken()));
|
||||
ud.put(UserData.TOKENBUCKET, tb);
|
||||
}
|
||||
if (tb.available() < want) {
|
||||
return false;
|
||||
}
|
||||
tb.remove(want);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,188 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.CacheControl;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.Response;
|
||||
import javax.ws.rs.core.UriInfo;
|
||||
import javax.ws.rs.core.Response.ResponseBuilder;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableListModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableModel;
|
||||
|
||||
@Path("/")
|
||||
public class RootResource implements Constants {
|
||||
private static final Log LOG = LogFactory.getLog(RootResource.class);
|
||||
|
||||
RESTServlet servlet;
|
||||
CacheControl cacheControl;
|
||||
|
||||
public RootResource() throws IOException {
|
||||
servlet = RESTServlet.getInstance();
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
private final User auth(final String token) throws IOException {
|
||||
User user = servlet.getAuthenticator().getUserForToken(token);
|
||||
if (user == null || user.isDisabled()) {
|
||||
throw new WebApplicationException(Response.Status.FORBIDDEN);
|
||||
}
|
||||
return user;
|
||||
}
|
||||
|
||||
private final TableListModel getTableList() throws IOException {
|
||||
TableListModel tableList = new TableListModel();
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
HTableDescriptor[] list = admin.listTables();
|
||||
for (HTableDescriptor htd: list) {
|
||||
tableList.add(new TableModel(htd.getNameAsString()));
|
||||
}
|
||||
return tableList;
|
||||
}
|
||||
|
||||
private final TableListModel getTableListForUser(final User user)
|
||||
throws IOException {
|
||||
TableListModel tableList;
|
||||
if (user.isAdmin()) {
|
||||
tableList = getTableList();
|
||||
} else {
|
||||
tableList = new TableListModel();
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
HTableDescriptor[] list = admin.listTables();
|
||||
String prefix = user.getName() + ".";
|
||||
for (HTableDescriptor htd: list) {
|
||||
String name = htd.getNameAsString();
|
||||
if (!name.startsWith(prefix)) {
|
||||
continue;
|
||||
}
|
||||
tableList.add(new TableModel(name.substring(prefix.length())));
|
||||
}
|
||||
}
|
||||
return tableList;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
if (servlet.isMultiUser()) {
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
try {
|
||||
ResponseBuilder response = Response.ok(getTableList());
|
||||
response.cacheControl(cacheControl);
|
||||
return response.build();
|
||||
} catch (IOException e) {
|
||||
throw new WebApplicationException(e,
|
||||
Response.Status.SERVICE_UNAVAILABLE);
|
||||
}
|
||||
}
|
||||
|
||||
@Path("status/cluster")
|
||||
public StorageClusterStatusResource getClusterStatusResource()
|
||||
throws IOException {
|
||||
if (servlet.isMultiUser()) {
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
return new StorageClusterStatusResource(User.DEFAULT_USER);
|
||||
}
|
||||
|
||||
@Path("version")
|
||||
public VersionResource getVersionResource() throws IOException {
|
||||
return new VersionResource();
|
||||
}
|
||||
|
||||
@Path("{token: [0-9a-fA-F]{32} }") // 128 bit md5 sums
|
||||
public Response getTableRootResource(
|
||||
final @PathParam("token") String token) throws IOException {
|
||||
if (servlet.isMultiUser()) {
|
||||
User user = auth(token);
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
try {
|
||||
ResponseBuilder response = Response.ok(getTableListForUser(user));
|
||||
response.cacheControl(cacheControl);
|
||||
return response.build();
|
||||
} catch (IOException e) {
|
||||
throw new WebApplicationException(e,
|
||||
Response.Status.SERVICE_UNAVAILABLE);
|
||||
}
|
||||
}
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
|
||||
@Path("{token: [0-9a-fA-F]{32} }/status/cluster") // 128 bit md5 sums
|
||||
public StorageClusterStatusResource getClusterStatusResourceAuthorized(
|
||||
final @PathParam("token") String token) throws IOException {
|
||||
if (servlet.isMultiUser()) {
|
||||
User user = auth(token);
|
||||
if (user != null && user.isAdmin()) {
|
||||
return new StorageClusterStatusResource(user);
|
||||
}
|
||||
throw new WebApplicationException(Response.Status.FORBIDDEN);
|
||||
}
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
|
||||
@Path("{token: [0-9a-fA-F]{32} }/{table}")
|
||||
public TableResource getTableResource(
|
||||
final @PathParam("token") String token,
|
||||
final @PathParam("table") String table) throws IOException {
|
||||
if (servlet.isMultiUser()) {
|
||||
User user = auth(token);
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
throw new WebApplicationException(Response.status(509).build());
|
||||
}
|
||||
return new TableResource(user, table);
|
||||
}
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
|
||||
@Path("{table}")
|
||||
public TableResource getTableResource(
|
||||
final @PathParam("table") String table) throws IOException {
|
||||
if (servlet.isMultiUser()) {
|
||||
throw new WebApplicationException(Response.Status.BAD_REQUEST);
|
||||
}
|
||||
return new TableResource(User.DEFAULT_USER, table);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,175 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.security.MessageDigest;
|
||||
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
/** Representation of an authorized user */
|
||||
public class User implements Constants {
|
||||
|
||||
public static final User DEFAULT_USER = new User("default",
|
||||
"00000000000000000000000000000000", true, true);
|
||||
|
||||
private String name;
|
||||
private String token;
|
||||
private boolean admin;
|
||||
private boolean disabled = false;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* <p>
|
||||
* Creates an access token. (Normally, you don't want this.)
|
||||
* @param name user name
|
||||
* @param admin true if user has administrator privilege
|
||||
* @throws Exception
|
||||
*/
|
||||
public User(String name, boolean admin) throws Exception {
|
||||
this.name = name;
|
||||
this.admin = admin;
|
||||
byte[] digest = MessageDigest.getInstance("MD5")
|
||||
.digest(Bytes.toBytes(name));
|
||||
StringBuffer sb = new StringBuffer();
|
||||
for (int i = 0; i < digest.length; i++) {
|
||||
sb.append(Integer.toHexString(0xff & digest[i]));
|
||||
}
|
||||
this.token = sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param name user name
|
||||
* @param token access token, a 16 char hex string
|
||||
* @param admin true if user has administrator privilege
|
||||
*/
|
||||
public User(String name, String token, boolean admin) {
|
||||
this(name, token, admin, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param name user name
|
||||
* @param token access token, a 16 char hex string
|
||||
* @param admin true if user has administrator privilege
|
||||
* @param disabled true if user is disabled
|
||||
*/
|
||||
public User(String name, String token, boolean admin, boolean disabled) {
|
||||
this.name = name;
|
||||
this.token = token;
|
||||
this.admin = admin;
|
||||
this.disabled = disabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return user name
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param name user name
|
||||
*/
|
||||
public void setName(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return access token, a 16 char hex string
|
||||
*/
|
||||
public String getToken() {
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param token access token, a 16 char hex string
|
||||
*/
|
||||
public void setToken(final String token) {
|
||||
this.token = token;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if user has administrator privilege
|
||||
*/
|
||||
public boolean isAdmin() {
|
||||
return admin;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param admin true if user has administrator privilege
|
||||
*/
|
||||
public void setAdmin(final boolean admin) {
|
||||
this.admin = admin;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if user is disabled
|
||||
*/
|
||||
public boolean isDisabled() {
|
||||
return disabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param admin true if user is disabled
|
||||
*/
|
||||
public void setDisabled(boolean disabled) {
|
||||
this.disabled = disabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + (admin ? 1231 : 1237);
|
||||
result = prime * result + (disabled ? 1231 : 1237);
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
result = prime * result + ((token == null) ? 0 : token.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
User other = (User) obj;
|
||||
if (admin != other.admin)
|
||||
return false;
|
||||
if (disabled != other.disabled)
|
||||
return false;
|
||||
if (name == null) {
|
||||
if (other.name != null)
|
||||
return false;
|
||||
} else if (!name.equals(other.name))
|
||||
return false;
|
||||
if (token == null) {
|
||||
if (other.token != null)
|
||||
return false;
|
||||
} else if (!token.equals(other.token))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
|
||||
public class HBCAuthenticator extends Authenticator {
|
||||
|
||||
Configuration conf;
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
*/
|
||||
public HBCAuthenticator() {
|
||||
this(HBaseConfiguration.create());
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
*/
|
||||
public HBCAuthenticator(Configuration conf) {
|
||||
this.conf = conf;
|
||||
}
|
||||
|
||||
@Override
|
||||
public User getUserForToken(String token) {
|
||||
String name = conf.get("stargate.auth.token." + token);
|
||||
if (name == null) {
|
||||
return null;
|
||||
}
|
||||
boolean admin = conf.getBoolean("stargate.auth.user." + name + ".admin",
|
||||
false);
|
||||
boolean disabled = conf.getBoolean("stargate.auth.user." + name + ".disabled",
|
||||
false);
|
||||
return new User(name, token, admin, disabled);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,110 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
|
||||
import org.apache.hadoop.hbase.client.Get;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Result;
|
||||
import org.apache.hadoop.hbase.stargate.Constants;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class HTableAuthenticator extends Authenticator implements Constants {
|
||||
|
||||
static final byte[] USER = Bytes.toBytes("user");
|
||||
static final byte[] NAME = Bytes.toBytes("name");
|
||||
static final byte[] ADMIN = Bytes.toBytes("admin");
|
||||
static final byte[] DISABLED = Bytes.toBytes("disabled");
|
||||
|
||||
Configuration conf;
|
||||
String tableName;
|
||||
HTable table;
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
*/
|
||||
public HTableAuthenticator() {
|
||||
this(HBaseConfiguration.create());
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
*/
|
||||
public HTableAuthenticator(Configuration conf) {
|
||||
this(conf, conf.get("stargate.auth.htable.name", USERS_TABLE));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
* @param tableName
|
||||
*/
|
||||
public HTableAuthenticator(Configuration conf, String tableName) {
|
||||
this.conf = conf;
|
||||
this.tableName = tableName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
* @param table
|
||||
*/
|
||||
public HTableAuthenticator(Configuration conf, HTable table) {
|
||||
this.conf = conf;
|
||||
this.table = table;
|
||||
this.tableName = Bytes.toString(table.getTableName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public User getUserForToken(String token) throws IOException {
|
||||
if (table == null) {
|
||||
this.table = new HTable(conf, tableName);
|
||||
}
|
||||
Get get = new Get(Bytes.toBytes(token));
|
||||
get.addColumn(USER, NAME);
|
||||
get.addColumn(USER, ADMIN);
|
||||
get.addColumn(USER, DISABLED);
|
||||
Result result = table.get(get);
|
||||
byte[] value = result.getValue(USER, NAME);
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
String name = Bytes.toString(value);
|
||||
boolean admin = false;
|
||||
value = result.getValue(USER, ADMIN);
|
||||
if (value != null) {
|
||||
admin = Bytes.toBoolean(value);
|
||||
}
|
||||
boolean disabled = false;
|
||||
value = result.getValue(USER, DISABLED);
|
||||
if (value != null) {
|
||||
disabled = Bytes.toBoolean(value);
|
||||
}
|
||||
return new User(name, token, admin, disabled);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,108 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
public class JDBCAuthenticator extends Authenticator {
|
||||
|
||||
static final Log LOG = LogFactory.getLog(JDBCAuthenticator.class);
|
||||
static final int MAX_RETRIES = 5;
|
||||
static final long RETRY_SLEEP_TIME = 1000 * 2;
|
||||
|
||||
String url;
|
||||
String table;
|
||||
String user;
|
||||
String password;
|
||||
Connection connection;
|
||||
PreparedStatement userFetchStmt;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
*/
|
||||
public JDBCAuthenticator(HBaseConfiguration conf) {
|
||||
this(conf.get("stargate.auth.jdbc.url"),
|
||||
conf.get("stargate.auth.jdbc.table"),
|
||||
conf.get("stargate.auth.jdbc.user"),
|
||||
conf.get("stargate.auth.jdbc.password"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param url
|
||||
* @param table
|
||||
* @param user
|
||||
* @param password
|
||||
*/
|
||||
public JDBCAuthenticator(String url, String table, String user,
|
||||
String password) {
|
||||
this.url = url;
|
||||
this.table = table;
|
||||
this.user = user;
|
||||
this.password = password;
|
||||
}
|
||||
|
||||
@Override
|
||||
public User getUserForToken(String token) throws IOException {
|
||||
int retries = 0;
|
||||
while (true) try {
|
||||
if (connection == null) {
|
||||
connection = DriverManager.getConnection(url, user, password);
|
||||
userFetchStmt = connection.prepareStatement(
|
||||
"SELECT name, admin, disabled FROM " + table + " WHERE token = ?");
|
||||
}
|
||||
ResultSet results;
|
||||
synchronized (userFetchStmt) {
|
||||
userFetchStmt.setString(1, token);
|
||||
results = userFetchStmt.executeQuery();
|
||||
}
|
||||
if (!results.next()) {
|
||||
return null;
|
||||
}
|
||||
return new User(results.getString(1), token, results.getBoolean(2),
|
||||
results.getBoolean(3));
|
||||
} catch (SQLException e) {
|
||||
connection = null;
|
||||
if (++retries > MAX_RETRIES) {
|
||||
throw new IOException(e);
|
||||
} else try {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
Thread.sleep(RETRY_SLEEP_TIME);
|
||||
} catch (InterruptedException ex) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,149 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAttribute;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.stargate.Constants;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper;
|
||||
|
||||
import org.apache.zookeeper.CreateMode;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
import org.apache.zookeeper.WatchedEvent;
|
||||
import org.apache.zookeeper.Watcher;
|
||||
import org.apache.zookeeper.ZooKeeper;
|
||||
import org.apache.zookeeper.ZooDefs.Ids;
|
||||
import org.apache.zookeeper.data.Stat;
|
||||
|
||||
import com.sun.jersey.api.json.JSONConfiguration;
|
||||
import com.sun.jersey.api.json.JSONJAXBContext;
|
||||
import com.sun.jersey.api.json.JSONUnmarshaller;
|
||||
|
||||
/**
|
||||
* A simple authenticator module for ZooKeeper.
|
||||
* <pre>
|
||||
* /stargate/
|
||||
* users/
|
||||
* <token></pre>
|
||||
* Where <tt><token></tt> is a JSON formatted user record with the keys
|
||||
* 'name' (String, required), 'token' (String, optional), 'admin' (boolean,
|
||||
* optional), and 'disabled' (boolean, optional).
|
||||
*/
|
||||
public class ZooKeeperAuthenticator extends Authenticator
|
||||
implements Constants {
|
||||
|
||||
@XmlRootElement(name="user")
|
||||
static class UserModel {
|
||||
@XmlAttribute public String name;
|
||||
@XmlAttribute public boolean admin = false;
|
||||
@XmlAttribute public boolean disabled = false;
|
||||
}
|
||||
|
||||
final String usersZNode;
|
||||
ZooKeeperWrapper wrapper;
|
||||
final JSONJAXBContext context;
|
||||
final JSONUnmarshaller unmarshaller;
|
||||
|
||||
private boolean ensureParentExists(final String znode) {
|
||||
int index = znode.lastIndexOf("/");
|
||||
if (index <= 0) { // Parent is root, which always exists.
|
||||
return true;
|
||||
}
|
||||
return ensureExists(znode.substring(0, index));
|
||||
}
|
||||
|
||||
private boolean ensureExists(final String znode) {
|
||||
ZooKeeper zk = wrapper.getZooKeeper();
|
||||
try {
|
||||
Stat stat = zk.exists(znode, false);
|
||||
if (stat != null) {
|
||||
return true;
|
||||
}
|
||||
zk.create(znode, new byte[0], Ids.OPEN_ACL_UNSAFE,
|
||||
CreateMode.PERSISTENT);
|
||||
return true;
|
||||
} catch (KeeperException.NodeExistsException e) {
|
||||
return true; // ok, move on.
|
||||
} catch (KeeperException.NoNodeException e) {
|
||||
return ensureParentExists(znode) && ensureExists(znode);
|
||||
} catch (KeeperException e) {
|
||||
} catch (InterruptedException e) {
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
* @throws IOException
|
||||
*/
|
||||
public ZooKeeperAuthenticator(Configuration conf) throws IOException {
|
||||
this(conf, new ZooKeeperWrapper(conf, new Watcher() {
|
||||
public void process(WatchedEvent event) { }
|
||||
}));
|
||||
ensureExists(USERS_ZNODE_ROOT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf
|
||||
* @param wrapper
|
||||
* @throws IOException
|
||||
*/
|
||||
public ZooKeeperAuthenticator(Configuration conf,
|
||||
ZooKeeperWrapper wrapper) throws IOException {
|
||||
this.usersZNode = conf.get("stargate.auth.zk.users", USERS_ZNODE_ROOT);
|
||||
this.wrapper = wrapper;
|
||||
try {
|
||||
this.context = new JSONJAXBContext(JSONConfiguration.natural().build(),
|
||||
UserModel.class);
|
||||
this.unmarshaller = context.createJSONUnmarshaller();
|
||||
} catch (Exception e) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public User getUserForToken(String token) throws IOException {
|
||||
ZooKeeper zk = wrapper.getZooKeeper();
|
||||
try {
|
||||
byte[] data = zk.getData(usersZNode + "/" + token, null, null);
|
||||
if (data == null) {
|
||||
return null;
|
||||
}
|
||||
UserModel model =
|
||||
unmarshaller.unmarshalFromJSON(new ByteArrayInputStream(data),
|
||||
UserModel.class);
|
||||
return new User(model.name, token, model.admin, model.disabled);
|
||||
} catch (KeeperException.NoNodeException e) {
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,193 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.client.Get;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.client.Result;
|
||||
import org.apache.hadoop.hbase.client.RowLock;
|
||||
import org.apache.hadoop.hbase.stargate.Constants;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* A HTable-backed token bucket.
|
||||
* <p>
|
||||
* Can be configured with <t>rate</t>, the number of tokens to add to the
|
||||
* bucket each second, and <t>size</t>, the maximum number of tokens allowed
|
||||
* to burst. Configuration is stored in the HTable adjacent to the token
|
||||
* count and is periodically refreshed.
|
||||
* <p>
|
||||
* Expected columns:
|
||||
* <p>
|
||||
* <ul>
|
||||
* <li>user:
|
||||
* <ul>
|
||||
* <li>user:tokens</li>
|
||||
* <li>user:tokens.rate</li>
|
||||
* <li>user:tokens.size</li>
|
||||
* </ul></li>
|
||||
* </ul>
|
||||
*/
|
||||
public class HTableTokenBucket implements Constants {
|
||||
|
||||
static final Log LOG = LogFactory.getLog(HTableTokenBucket.class);
|
||||
|
||||
static final byte[] USER = Bytes.toBytes("user");
|
||||
static final byte[] TOKENS = Bytes.toBytes("tokens");
|
||||
static final byte[] TOKENS_RATE = Bytes.toBytes("tokens.rate");
|
||||
static final byte[] TOKENS_SIZE = Bytes.toBytes("tokens.size");
|
||||
|
||||
Configuration conf;
|
||||
String tableName;
|
||||
HTable table;
|
||||
byte[] row;
|
||||
int tokens;
|
||||
double rate = 20.0; // default, 20 ops added per second
|
||||
int size = 100; // burst
|
||||
long lastUpdated = System.currentTimeMillis();
|
||||
long configUpdateInterval;
|
||||
long lastConfigUpdated = System.currentTimeMillis();
|
||||
|
||||
void updateConfig() throws IOException {
|
||||
Get get = new Get(row);
|
||||
get.addColumn(USER, TOKENS_RATE);
|
||||
get.addColumn(USER, TOKENS_SIZE);
|
||||
Result result = table.get(get);
|
||||
byte[] value = result.getValue(USER, TOKENS_RATE);
|
||||
if (value != null) {
|
||||
this.rate = (int)Bytes.toDouble(value);
|
||||
}
|
||||
value = result.getValue(USER, TOKENS_SIZE);
|
||||
if (value != null) {
|
||||
this.size = (int)Bytes.toLong(value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf configuration
|
||||
* @param row row key for user
|
||||
* @throws IOException
|
||||
*/
|
||||
public HTableTokenBucket(Configuration conf, byte[] row)
|
||||
throws IOException {
|
||||
this(conf, conf.get("stargate.tb.htable.name", USERS_TABLE), row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param conf configuration
|
||||
* @param tableName the table to use
|
||||
* @param row row key for user
|
||||
* @throws IOException
|
||||
*/
|
||||
public HTableTokenBucket(Configuration conf, String tableName,
|
||||
byte[] row) throws IOException {
|
||||
this.conf = conf;
|
||||
this.tableName = tableName;
|
||||
this.row = row;
|
||||
this.table = new HTable(conf, tableName);
|
||||
this.configUpdateInterval =
|
||||
conf.getLong("stargate.tb.update.interval", 1000 * 60);
|
||||
updateConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of remaining tokens in the bucket (roughly)
|
||||
* @throws IOException
|
||||
*/
|
||||
public int available() throws IOException {
|
||||
long now = System.currentTimeMillis();
|
||||
if (now - lastConfigUpdated > configUpdateInterval) {
|
||||
try {
|
||||
updateConfig();
|
||||
} catch (IOException e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
lastConfigUpdated = now;
|
||||
}
|
||||
|
||||
// We can't simply use incrementColumnValue here because the timestamp of
|
||||
// the keyvalue will not be changed as long as it remains in memstore, so
|
||||
// there will be some unavoidable contention on the row if multiple
|
||||
// Stargate instances are concurrently serving the same user, and three
|
||||
// more round trips than otherwise.
|
||||
RowLock rl = table.lockRow(row);
|
||||
try {
|
||||
Get get = new Get(row, rl);
|
||||
get.addColumn(USER, TOKENS);
|
||||
List<KeyValue> kvs = table.get(get).list();
|
||||
if (kvs != null && !kvs.isEmpty()) {
|
||||
KeyValue kv = kvs.get(0);
|
||||
tokens = (int)Bytes.toLong(kv.getValue());
|
||||
lastUpdated = kv.getTimestamp();
|
||||
} else {
|
||||
tokens = (int)rate;
|
||||
}
|
||||
long elapsed = now - lastUpdated;
|
||||
int i = (int)((elapsed / 1000) * rate); // convert sec <-> ms
|
||||
if (tokens + i > size) {
|
||||
i = size - tokens;
|
||||
}
|
||||
if (i > 0) {
|
||||
tokens += i;
|
||||
Put put = new Put(row, rl);
|
||||
put.add(USER, TOKENS, Bytes.toBytes((long)tokens));
|
||||
put.setWriteToWAL(false);
|
||||
table.put(put);
|
||||
table.flushCommits();
|
||||
}
|
||||
} finally {
|
||||
table.unlockRow(rl);
|
||||
}
|
||||
return tokens;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param t the number of tokens to consume from the bucket
|
||||
* @throws IOException
|
||||
*/
|
||||
public void remove(int t) throws IOException {
|
||||
// Here we don't care about timestamp changes; actually it's advantageous
|
||||
// if they are not updated, otherwise available() and remove() must be
|
||||
// used as near to each other in time as possible.
|
||||
table.incrementColumnValue(row, USER, TOKENS, (long) -t, false);
|
||||
}
|
||||
|
||||
public double getRate() {
|
||||
return rate;
|
||||
}
|
||||
|
||||
public int getSize() {
|
||||
return size;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.util;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.util.SoftValueMap;
|
||||
|
||||
/**
|
||||
* Provides a softmap backed collection of user data. The collection can be
|
||||
* reclaimed by the garbage collector at any time when under heap pressure.
|
||||
*/
|
||||
public class SoftUserData extends UserData {
|
||||
|
||||
static final Map<User,UserData> map = new SoftValueMap<User,UserData>();
|
||||
|
||||
public static synchronized UserData get(final User user) {
|
||||
UserData data = map.get(user);
|
||||
if (data == null) {
|
||||
data = new UserData();
|
||||
map.put(user, data);
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
public static synchronized UserData put(final User user,
|
||||
final UserData data) {
|
||||
return map.put(user, data);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,61 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.util;
|
||||
|
||||
public class TokenBucket {
|
||||
|
||||
private int tokens;
|
||||
private int rate;
|
||||
private int size;
|
||||
private long lastUpdated;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param rate limit in units per second
|
||||
* @param size maximum burst in units per second
|
||||
*/
|
||||
public TokenBucket(int rate, int size) {
|
||||
this.rate = rate;
|
||||
this.tokens = this.size = size;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of remaining tokens in the bucket
|
||||
*/
|
||||
public int available() {
|
||||
long now = System.currentTimeMillis();
|
||||
long elapsed = now - lastUpdated;
|
||||
lastUpdated = now;
|
||||
tokens += elapsed * rate;
|
||||
if (tokens > size) {
|
||||
tokens = size;
|
||||
}
|
||||
return tokens;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param t the number of tokens to consume from the bucket
|
||||
*/
|
||||
public void remove(int t) {
|
||||
tokens -= t;
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,51 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.util;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Generic storage for per user information.
|
||||
*/
|
||||
public class UserData {
|
||||
|
||||
public static final int TOKENBUCKET = 1;
|
||||
|
||||
Map<Integer,Object> data = new HashMap<Integer,Object>(1);
|
||||
|
||||
public synchronized boolean has(final int sel) {
|
||||
return data.get(sel) != null;
|
||||
}
|
||||
|
||||
public synchronized Object get(final int sel) {
|
||||
return data.get(sel);
|
||||
}
|
||||
|
||||
public synchronized Object put(final int sel, final Object o) {
|
||||
return data.put(sel, o);
|
||||
}
|
||||
|
||||
public synchronized Object remove(int sel) {
|
||||
return data.remove(sel);
|
||||
}
|
||||
|
||||
}
|
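SoftUserData and the TOKENBUCKET selector above are meant to be combined; a sketch of the likely glue code, hypothetical and with illustrative rate and size values:

// Hypothetical: fetch, or lazily create, the per-user TokenBucket cached under the
// TOKENBUCKET selector of the soft per-user map.
static TokenBucket bucketFor(User user) {
  UserData data = SoftUserData.get(user);
  synchronized (data) {                  // make the check-then-put pair atomic
    TokenBucket tb = (TokenBucket) data.get(UserData.TOKENBUCKET);
    if (tb == null) {
      tb = new TokenBucket(10, 50);      // illustrative values: rate 10, burst size 50
      data.put(UserData.TOKENBUCKET, tb);
    }
    return tb;
  }
}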
File diff suppressed because it is too large
|
@@ -1,144 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<schema targetNamespace="ModelSchema" elementFormDefault="qualified" xmlns="http://www.w3.org/2001/XMLSchema" xmlns:tns="ModelSchema">
|
||||
|
||||
<element name="CellSet" type="tns:CellSet"></element>
|
||||
|
||||
<complexType name="CellSet">
|
||||
<sequence>
|
||||
<element name="row" type="tns:Row" maxOccurs="unbounded" minOccurs="1"></element>
|
||||
</sequence>
|
||||
</complexType>
|
||||
|
||||
<complexType name="Row">
|
||||
<sequence>
|
||||
<element name="key" type="base64Binary"></element>
|
||||
<element name="cell" type="tns:Cell" maxOccurs="unbounded" minOccurs="1"></element>
|
||||
</sequence>
|
||||
</complexType>
|
||||
|
||||
<complexType name="Cell">
|
||||
<sequence>
|
||||
<element name="value" maxOccurs="1" minOccurs="1"><simpleType><restriction base="base64Binary"></restriction></simpleType></element>
|
||||
</sequence>
|
||||
<attribute name="column" type="base64Binary" />
|
||||
<attribute name="timestamp" type="int" />
|
||||
</complexType>
|
||||
|
||||
<element name="Version" type="tns:Version"></element>
|
||||
|
||||
<complexType name="Version">
|
||||
<attribute name="Stargate" type="string"></attribute>
|
||||
<attribute name="JVM" type="string"></attribute>
|
||||
<attribute name="OS" type="string"></attribute>
|
||||
<attribute name="Server" type="string"></attribute>
|
||||
<attribute name="Jersey" type="string"></attribute>
|
||||
</complexType>
|
||||
|
||||
|
||||
<element name="TableList" type="tns:TableList"></element>
|
||||
|
||||
<complexType name="TableList">
|
||||
<sequence>
|
||||
<element name="table" type="tns:Table" maxOccurs="unbounded" minOccurs="1"></element>
|
||||
</sequence>
|
||||
</complexType>
|
||||
|
||||
<complexType name="Table">
|
||||
<sequence>
|
||||
<element name="name" type="string"></element>
|
||||
</sequence>
|
||||
</complexType>
|
||||
|
||||
<element name="TableInfo" type="tns:TableInfo"></element>
|
||||
|
||||
<complexType name="TableInfo">
|
||||
<sequence>
|
||||
<element name="region" type="tns:TableRegion" maxOccurs="unbounded" minOccurs="1"></element>
|
||||
</sequence>
|
||||
<attribute name="name" type="string"></attribute>
|
||||
</complexType>
|
||||
|
||||
<complexType name="TableRegion">
|
||||
<attribute name="name" type="string"></attribute>
|
||||
<attribute name="id" type="int"></attribute>
|
||||
<attribute name="startKey" type="base64Binary"></attribute>
|
||||
<attribute name="endKey" type="base64Binary"></attribute>
|
||||
<attribute name="location" type="string"></attribute>
|
||||
</complexType>
|
||||
|
||||
<element name="TableSchema" type="tns:TableSchema"></element>
|
||||
|
||||
<complexType name="TableSchema">
|
||||
<sequence>
|
||||
<element name="column" type="tns:ColumnSchema" maxOccurs="unbounded" minOccurs="1"></element>
|
||||
</sequence>
|
||||
<attribute name="name" type="string"></attribute>
|
||||
<anyAttribute></anyAttribute>
|
||||
</complexType>
|
||||
|
||||
<complexType name="ColumnSchema">
|
||||
<attribute name="name" type="string"></attribute>
|
||||
<anyAttribute></anyAttribute>
|
||||
</complexType>
|
||||
|
||||
<element name="Scanner" type="tns:Scanner"></element>
|
||||
|
||||
<complexType name="Scanner">
|
||||
<sequence>
|
||||
<element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"></element>
|
||||
</sequence>
|
||||
<attribute name="startRow" type="base64Binary"></attribute>
|
||||
<attribute name="endRow" type="base64Binary"></attribute>
|
||||
<attribute name="batch" type="int"></attribute>
|
||||
<attribute name="startTime" type="int"></attribute>
|
||||
<attribute name="endTime" type="int"></attribute>
|
||||
</complexType>
|
||||
|
||||
<element name="StorageClusterVersion"
|
||||
type="tns:StorageClusterVersion">
|
||||
</element>
|
||||
|
||||
<complexType name="StorageClusterVersion">
|
||||
<attribute name="version" type="string"></attribute>
|
||||
</complexType>
|
||||
|
||||
<element name="StorageClusterStatus"
|
||||
type="tns:StorageClusterStatus">
|
||||
</element>
|
||||
|
||||
<complexType name="StorageClusterStatus">
|
||||
<sequence>
|
||||
<element name="liveNode" type="tns:Node"
|
||||
maxOccurs="unbounded" minOccurs="0">
|
||||
</element>
|
||||
<element name="deadNode" type="string" maxOccurs="unbounded"
|
||||
minOccurs="0">
|
||||
</element>
|
||||
</sequence>
|
||||
<attribute name="regions" type="int"></attribute>
|
||||
<attribute name="requests" type="int"></attribute>
|
||||
<attribute name="averageLoad" type="float"></attribute>
|
||||
</complexType>
|
||||
|
||||
<complexType name="Node">
|
||||
<sequence>
|
||||
<element name="region" type="tns:Region"
|
||||
maxOccurs="unbounded" minOccurs="0">
|
||||
</element>
|
||||
</sequence>
|
||||
<attribute name="name" type="string"></attribute>
|
||||
<attribute name="startCode" type="int"></attribute>
|
||||
<attribute name="requests" type="int"></attribute>
|
||||
<attribute name="heapSizeMB" type="int"></attribute>
|
||||
<attribute name="maxHeapSizeMB" type="int"></attribute>
|
||||
</complexType>
|
||||
|
||||
<complexType name="Region">
|
||||
<attribute name="name" type="base64Binary"></attribute>
|
||||
<attribute name="stores" type="int"></attribute>
|
||||
<attribute name="storefiles" type="int"></attribute>
|
||||
<attribute name="storefileSizeMB" type="int"></attribute>
|
||||
<attribute name="memstoreSizeMB" type="int"></attribute>
|
||||
<attribute name="storefileIndexSizeMB" type="int"></attribute>
|
||||
</complexType>
|
||||
</schema>
|
|
@@ -1,238 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.MiniHBaseCluster;
|
||||
import org.apache.hadoop.hbase.MiniZooKeeperCluster;
|
||||
import org.apache.hadoop.hbase.client.HConnectionManager;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.util.FSUtils;
|
||||
import org.apache.hadoop.hbase.util.JVMClusterUtil;
|
||||
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.mortbay.jetty.Server;
|
||||
import org.mortbay.jetty.servlet.Context;
|
||||
import org.mortbay.jetty.servlet.ServletHolder;
|
||||
|
||||
import com.sun.jersey.spi.container.servlet.ServletContainer;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class MiniClusterTestBase extends TestCase {
|
||||
protected static final Log LOG =
|
||||
LogFactory.getLog(MiniClusterTestBase.class);
|
||||
|
||||
public static final String MIMETYPE_BINARY = "application/octet-stream";
|
||||
public static final String MIMETYPE_JSON = "application/json";
|
||||
public static final String MIMETYPE_PLAIN = "text/plain";
|
||||
public static final String MIMETYPE_PROTOBUF = "application/x-protobuf";
|
||||
public static final String MIMETYPE_XML = "text/xml";
|
||||
|
||||
// use a nonstandard port
|
||||
public static final int DEFAULT_TEST_PORT = 38080;
|
||||
|
||||
protected static Configuration conf = HBaseConfiguration.create();
|
||||
protected static MiniZooKeeperCluster zooKeeperCluster;
|
||||
protected static MiniHBaseCluster hbaseCluster;
|
||||
protected static MiniDFSCluster dfsCluster;
|
||||
protected static File testDir;
|
||||
protected static int testServletPort;
|
||||
protected static Server server;
|
||||
|
||||
public static boolean isMiniClusterRunning() {
|
||||
return server != null;
|
||||
}
|
||||
|
||||
private static void startDFS() throws Exception {
|
||||
if (dfsCluster != null) {
|
||||
LOG.error("MiniDFSCluster already running");
|
||||
return;
|
||||
}
|
||||
// This spews a bunch of warnings about missing scheme. TODO: fix.
|
||||
dfsCluster = new MiniDFSCluster(0, conf, 2, true, true, true,
|
||||
null, null, null, null);
|
||||
// mangle the conf so that the fs parameter points to the minidfs we
|
||||
// just started up
|
||||
FileSystem filesystem = dfsCluster.getFileSystem();
|
||||
conf.set("fs.defaultFS", filesystem.getUri().toString());
|
||||
Path parentdir = filesystem.getHomeDirectory();
|
||||
conf.set(HConstants.HBASE_DIR, parentdir.toString());
|
||||
filesystem.mkdirs(parentdir);
|
||||
FSUtils.setVersion(filesystem, parentdir);
|
||||
}
|
||||
|
||||
private static void stopDFS() {
|
||||
if (dfsCluster != null) try {
|
||||
FileSystem fs = dfsCluster.getFileSystem();
|
||||
if (fs != null) {
|
||||
LOG.info("Shutting down FileSystem");
|
||||
fs.close();
|
||||
}
|
||||
FileSystem.closeAll();
|
||||
dfsCluster = null;
|
||||
} catch (Exception e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
|
||||
private static void startZooKeeper() throws Exception {
|
||||
if (zooKeeperCluster != null) {
|
||||
LOG.error("ZooKeeper already running");
|
||||
return;
|
||||
}
|
||||
zooKeeperCluster = new MiniZooKeeperCluster();
|
||||
zooKeeperCluster.startup(testDir);
|
||||
LOG.info("started " + zooKeeperCluster.getClass().getName());
|
||||
}
|
||||
|
||||
private static void stopZooKeeper() {
|
||||
if (zooKeeperCluster != null) try {
|
||||
zooKeeperCluster.shutdown();
|
||||
zooKeeperCluster = null;
|
||||
} catch (Exception e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
|
||||
private static void startHBase() throws Exception {
|
||||
if (hbaseCluster != null) {
|
||||
LOG.error("MiniHBaseCluster already running");
|
||||
return;
|
||||
}
|
||||
hbaseCluster = new MiniHBaseCluster(conf, 1);
|
||||
// opening the META table ensures that cluster is running
|
||||
new HTable(conf, HConstants.META_TABLE_NAME);
|
||||
LOG.info("started MiniHBaseCluster");
|
||||
}
|
||||
|
||||
private static void stopHBase() {
|
||||
if (hbaseCluster != null) try {
|
||||
HConnectionManager.deleteConnectionInfo(conf, true);
|
||||
hbaseCluster.shutdown();
|
||||
hbaseCluster = null;
|
||||
} catch (Exception e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
|
||||
private static void startServletContainer() throws Exception {
|
||||
if (server != null) {
|
||||
LOG.error("ServletContainer already running");
|
||||
return;
|
||||
}
|
||||
|
||||
// set up the Jersey servlet container for Jetty
|
||||
ServletHolder sh = new ServletHolder(ServletContainer.class);
|
||||
sh.setInitParameter(
|
||||
"com.sun.jersey.config.property.resourceConfigClass",
|
||||
ResourceConfig.class.getCanonicalName());
|
||||
sh.setInitParameter("com.sun.jersey.config.property.packages",
|
||||
"jetty");
|
||||
|
||||
LOG.info("configured " + ServletContainer.class.getName());
|
||||
|
||||
// set up Jetty and run the embedded server
|
||||
testServletPort = conf.getInt("test.stargate.port", DEFAULT_TEST_PORT);
|
||||
server = new Server(testServletPort);
|
||||
server.setSendServerVersion(false);
|
||||
server.setSendDateHeader(false);
|
||||
// set up context
|
||||
Context context = new Context(server, "/", Context.SESSIONS);
|
||||
context.addServlet(sh, "/*");
|
||||
// start the server
|
||||
server.start();
|
||||
|
||||
LOG.info("started " + server.getClass().getName() + " on port " +
|
||||
testServletPort);
|
||||
}
|
||||
|
||||
private static void stopServletContainer() {
|
||||
if (server != null) try {
|
||||
server.stop();
|
||||
server = null;
|
||||
} catch (Exception e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
|
||||
public static void startMiniCluster() throws Exception {
|
||||
try {
|
||||
startDFS();
|
||||
startZooKeeper();
|
||||
startHBase();
|
||||
startServletContainer();
|
||||
} catch (Exception e) {
|
||||
stopServletContainer();
|
||||
stopHBase();
|
||||
stopZooKeeper();
|
||||
stopDFS();
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public static void stopMiniCluster() {
|
||||
stopServletContainer();
|
||||
stopHBase();
|
||||
stopZooKeeper();
|
||||
stopDFS();
|
||||
}
|
||||
|
||||
static class MiniClusterShutdownThread extends Thread {
|
||||
public void run() {
|
||||
stopMiniCluster();
|
||||
Path path = new Path(
|
||||
conf.get("test.build.data",
|
||||
System.getProperty("test.build.data", "build/test/data")));
|
||||
try {
|
||||
FileSystem.get(conf).delete(path, true);
|
||||
} catch (IOException e) {
|
||||
LOG.error(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
// start the mini cluster if it is not running yet
|
||||
if (!isMiniClusterRunning()) {
|
||||
startMiniCluster();
|
||||
Runtime.getRuntime().addShutdownHook(new MiniClusterShutdownThread());
|
||||
}
|
||||
|
||||
// tell HttpClient to dump request and response headers into the test
|
||||
// log at DEBUG level
|
||||
Logger.getLogger("httpclient.wire.header").setLevel(Level.DEBUG);
|
||||
|
||||
super.setUp();
|
||||
}
|
||||
}
|
|
@@ -1,45 +0,0 @@
|
|||
/*
|
||||
* Copyright 2009 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
|
||||
public class Test00MiniCluster extends MiniClusterTestBase {
|
||||
public void testDFSMiniCluster() {
|
||||
assertNotNull(dfsCluster);
|
||||
}
|
||||
|
||||
public void testZooKeeperMiniCluster() {
|
||||
assertNotNull(zooKeeperCluster);
|
||||
}
|
||||
|
||||
public void testHBaseMiniCluster() throws IOException {
|
||||
assertNotNull(hbaseCluster);
|
||||
assertNotNull(new HTable(conf, HConstants.META_TABLE_NAME));
|
||||
}
|
||||
|
||||
public void testStargateServlet() throws IOException {
|
||||
assertNotNull(server);
|
||||
}
|
||||
}
|
|
@@ -1,83 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class TestHBCAuthenticator extends TestCase {
|
||||
|
||||
static final String UNKNOWN_TOKEN = "00000000000000000000000000000000";
|
||||
static final String ADMIN_TOKEN = "e998efffc67c49c6e14921229a51b7b3";
|
||||
static final String ADMIN_USERNAME = "testAdmin";
|
||||
static final String USER_TOKEN = "da4829144e3a2febd909a6e1b4ed7cfa";
|
||||
static final String USER_USERNAME = "testUser";
|
||||
static final String DISABLED_TOKEN = "17de5b5db0fd3de0847bd95396f36d92";
|
||||
static final String DISABLED_USERNAME = "disabledUser";
|
||||
|
||||
static Configuration conf;
|
||||
static HBCAuthenticator authenticator;
|
||||
static {
|
||||
conf = HBaseConfiguration.create();
|
||||
conf.set("stargate.auth.token." + USER_TOKEN, USER_USERNAME);
|
||||
conf.set("stargate.auth.user." + USER_USERNAME + ".admin", "false");
|
||||
conf.set("stargate.auth.user." + USER_USERNAME + ".disabled", "false");
|
||||
conf.set("stargate.auth.token." + ADMIN_TOKEN, ADMIN_USERNAME);
|
||||
conf.set("stargate.auth.user." + ADMIN_USERNAME + ".admin", "true");
|
||||
conf.set("stargate.auth.user." + ADMIN_USERNAME + ".disabled", "false");
|
||||
conf.set("stargate.auth.token." + DISABLED_TOKEN, DISABLED_USERNAME);
|
||||
conf.set("stargate.auth.user." + DISABLED_USERNAME + ".admin", "false");
|
||||
conf.set("stargate.auth.user." + DISABLED_USERNAME + ".disabled", "true");
|
||||
authenticator = new HBCAuthenticator(conf);
|
||||
}
|
||||
|
||||
public void testGetUserUnknown() throws Exception {
|
||||
User user = authenticator.getUserForToken(UNKNOWN_TOKEN);
|
||||
assertNull(user);
|
||||
}
|
||||
|
||||
public void testGetAdminUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(ADMIN_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), ADMIN_USERNAME);
|
||||
assertTrue(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetPlainUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(USER_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), USER_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetDisabledUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(DISABLED_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), DISABLED_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertTrue(user.isDisabled());
|
||||
}
|
||||
}
|
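All four authenticator tests in this change (HBC, HTable, JDBC, ZooKeeper) exercise the same contract; a sketch of how a caller might consume it, hypothetical and assuming a common Authenticator supertype for the implementations under test:

// Unknown tokens map to null and disabled accounts are refused; admins keep their
// real table names while plain users are scoped to a per-user prefix elsewhere in
// this change.
static boolean mayProceed(Authenticator authenticator, String token)
    throws Exception {
  User user = authenticator.getUserForToken(token);
  return user != null && !user.isDisabled();
}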
|
@@ -1,105 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.stargate.MiniClusterTestBase;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class TestHTableAuthenticator extends MiniClusterTestBase {
|
||||
|
||||
static final String UNKNOWN_TOKEN = "00000000000000000000000000000000";
|
||||
static final String ADMIN_TOKEN = "e998efffc67c49c6e14921229a51b7b3";
|
||||
static final String ADMIN_USERNAME = "testAdmin";
|
||||
static final String USER_TOKEN = "da4829144e3a2febd909a6e1b4ed7cfa";
|
||||
static final String USER_USERNAME = "testUser";
|
||||
static final String DISABLED_TOKEN = "17de5b5db0fd3de0847bd95396f36d92";
|
||||
static final String DISABLED_USERNAME = "disabledUser";
|
||||
|
||||
static final String TABLE = "TestHTableAuthenticator";
|
||||
static final byte[] USER = Bytes.toBytes("user");
|
||||
static final byte[] NAME = Bytes.toBytes("name");
|
||||
static final byte[] ADMIN = Bytes.toBytes("admin");
|
||||
static final byte[] DISABLED = Bytes.toBytes("disabled");
|
||||
|
||||
HTableAuthenticator authenticator;
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
HBaseAdmin admin = new HBaseAdmin(conf);
|
||||
if (!admin.tableExists(TABLE)) {
|
||||
HTableDescriptor htd = new HTableDescriptor(TABLE);
|
||||
htd.addFamily(new HColumnDescriptor(USER));
|
||||
admin.createTable(htd);
|
||||
HTable table = new HTable(conf, TABLE);
|
||||
Put put = new Put(Bytes.toBytes(ADMIN_TOKEN));
|
||||
put.add(USER, NAME, Bytes.toBytes(ADMIN_USERNAME));
|
||||
put.add(USER, ADMIN, Bytes.toBytes(true));
|
||||
table.put(put);
|
||||
put = new Put(Bytes.toBytes(USER_TOKEN));
|
||||
put.add(USER, NAME, Bytes.toBytes(USER_USERNAME));
|
||||
put.add(USER, ADMIN, Bytes.toBytes(false));
|
||||
table.put(put);
|
||||
put = new Put(Bytes.toBytes(DISABLED_TOKEN));
|
||||
put.add(USER, NAME, Bytes.toBytes(DISABLED_USERNAME));
|
||||
put.add(USER, DISABLED, Bytes.toBytes(true));
|
||||
table.put(put);
|
||||
table.flushCommits();
|
||||
}
|
||||
authenticator = new HTableAuthenticator(conf, TABLE);
|
||||
}
|
||||
|
||||
public void testGetUserUnknown() throws Exception {
|
||||
User user = authenticator.getUserForToken(UNKNOWN_TOKEN);
|
||||
assertNull(user);
|
||||
}
|
||||
|
||||
public void testGetAdminUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(ADMIN_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), ADMIN_USERNAME);
|
||||
assertTrue(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetPlainUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(USER_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), USER_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetDisabledUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(DISABLED_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), DISABLED_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertTrue(user.isDisabled());
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,112 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class TestJDBCAuthenticator extends TestCase {
|
||||
|
||||
static final Log LOG = LogFactory.getLog(TestJDBCAuthenticator.class);
|
||||
|
||||
static final String TABLE = "users";
|
||||
static final String JDBC_URL = "jdbc:hsqldb:mem:test";
|
||||
|
||||
static final String UNKNOWN_TOKEN = "00000000000000000000000000000000";
|
||||
static final String ADMIN_TOKEN = "e998efffc67c49c6e14921229a51b7b3";
|
||||
static final String ADMIN_USERNAME = "testAdmin";
|
||||
static final String USER_TOKEN = "da4829144e3a2febd909a6e1b4ed7cfa";
|
||||
static final String USER_USERNAME = "testUser";
|
||||
static final String DISABLED_TOKEN = "17de5b5db0fd3de0847bd95396f36d92";
|
||||
static final String DISABLED_USERNAME = "disabledUser";
|
||||
|
||||
static JDBCAuthenticator authenticator;
|
||||
static {
|
||||
try {
|
||||
Class.forName("org.hsqldb.jdbcDriver");
|
||||
Connection c = DriverManager.getConnection(JDBC_URL, "SA", "");
|
||||
c.createStatement().execute(
|
||||
"CREATE TABLE " + TABLE + " ( " +
|
||||
"token CHAR(32) PRIMARY KEY, " +
|
||||
"name VARCHAR(32), " +
|
||||
"admin BOOLEAN, " +
|
||||
"disabled BOOLEAN " +
|
||||
")");
|
||||
c.createStatement().execute(
|
||||
"INSERT INTO " + TABLE + " ( token,name,admin,disabled ) " +
|
||||
"VALUES ( '" + ADMIN_TOKEN + "','" + ADMIN_USERNAME +
|
||||
"',TRUE,FALSE )");
|
||||
c.createStatement().execute(
|
||||
"INSERT INTO " + TABLE + " ( token,name,admin,disabled ) " +
|
||||
"VALUES ( '" + USER_TOKEN + "','" + USER_USERNAME +
|
||||
"',FALSE,FALSE )");
|
||||
c.createStatement().execute(
|
||||
"INSERT INTO " + TABLE + " ( token,name,admin,disabled ) " +
|
||||
"VALUES ( '" + DISABLED_TOKEN + "','" + DISABLED_USERNAME +
|
||||
"',FALSE,TRUE )");
|
||||
c.createStatement().execute("CREATE USER test PASSWORD access");
|
||||
c.createStatement().execute("GRANT ALL ON " + TABLE + " TO test");
|
||||
c.close();
|
||||
authenticator = new JDBCAuthenticator(JDBC_URL, TABLE, "test",
|
||||
"access");
|
||||
} catch (Exception e) {
|
||||
LOG.warn(StringUtils.stringifyException(e));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetUserUnknown() throws Exception {
|
||||
User user = authenticator.getUserForToken(UNKNOWN_TOKEN);
|
||||
assertNull(user);
|
||||
}
|
||||
|
||||
public void testGetAdminUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(ADMIN_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), ADMIN_USERNAME);
|
||||
assertTrue(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetPlainUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(USER_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), USER_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetDisabledUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(DISABLED_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), DISABLED_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertTrue(user.isDisabled());
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,118 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.MiniClusterTestBase;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.auth.ZooKeeperAuthenticator.UserModel;
|
||||
|
||||
import org.apache.zookeeper.CreateMode;
|
||||
import org.apache.zookeeper.ZooKeeper;
|
||||
import org.apache.zookeeper.ZooDefs.Ids;
|
||||
|
||||
import com.sun.jersey.api.json.JSONConfiguration;
|
||||
import com.sun.jersey.api.json.JSONJAXBContext;
|
||||
import com.sun.jersey.api.json.JSONMarshaller;
|
||||
|
||||
public class TestZooKeeperAuthenticator extends MiniClusterTestBase {
|
||||
|
||||
static final String UNKNOWN_TOKEN = "00000000000000000000000000000000";
|
||||
static final String ADMIN_TOKEN = "e998efffc67c49c6e14921229a51b7b3";
|
||||
static final String ADMIN_USERNAME = "testAdmin";
|
||||
static final String USER_TOKEN = "da4829144e3a2febd909a6e1b4ed7cfa";
|
||||
static final String USER_USERNAME = "testUser";
|
||||
static final String DISABLED_TOKEN = "17de5b5db0fd3de0847bd95396f36d92";
|
||||
static final String DISABLED_USERNAME = "disabledUser";
|
||||
|
||||
ZooKeeperAuthenticator authenticator;
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
authenticator = new ZooKeeperAuthenticator(conf);
|
||||
ZooKeeper zk = authenticator.wrapper.getZooKeeper();
|
||||
JSONJAXBContext context =
|
||||
new JSONJAXBContext(JSONConfiguration.natural().build(),
|
||||
UserModel.class);
|
||||
JSONMarshaller marshaller = context.createJSONMarshaller();
|
||||
if (zk.exists(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" +
|
||||
ADMIN_TOKEN, null) == null) {
|
||||
UserModel model = new UserModel();
|
||||
model.name = ADMIN_USERNAME;
|
||||
model.admin = true;
|
||||
ByteArrayOutputStream os = new ByteArrayOutputStream();
|
||||
marshaller.marshallToJSON(model, os);
|
||||
zk.create(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" + ADMIN_TOKEN,
|
||||
os.toByteArray(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
|
||||
}
|
||||
if (zk.exists(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" +
|
||||
USER_TOKEN, null) == null) {
|
||||
UserModel model = new UserModel();
|
||||
model.name = USER_USERNAME;
|
||||
ByteArrayOutputStream os = new ByteArrayOutputStream();
|
||||
marshaller.marshallToJSON(model, os);
|
||||
zk.create(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" + USER_TOKEN,
|
||||
os.toByteArray(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
|
||||
}
|
||||
if (zk.exists(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" +
|
||||
DISABLED_TOKEN, null) == null) {
|
||||
UserModel model = new UserModel();
|
||||
model.name = DISABLED_USERNAME;
|
||||
model.disabled = true;
|
||||
ByteArrayOutputStream os = new ByteArrayOutputStream();
|
||||
marshaller.marshallToJSON(model, os);
|
||||
zk.create(ZooKeeperAuthenticator.USERS_ZNODE_ROOT + "/" +DISABLED_TOKEN,
|
||||
os.toByteArray(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetUserUnknown() throws Exception {
|
||||
User user = authenticator.getUserForToken(UNKNOWN_TOKEN);
|
||||
assertNull(user);
|
||||
}
|
||||
|
||||
public void testGetAdminUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(ADMIN_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), ADMIN_USERNAME);
|
||||
assertTrue(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetPlainUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(USER_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), USER_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertFalse(user.isDisabled());
|
||||
}
|
||||
|
||||
public void testGetDisabledUser() throws Exception {
|
||||
User user = authenticator.getUserForToken(DISABLED_TOKEN);
|
||||
assertNotNull(user);
|
||||
assertEquals(user.getName(), DISABLED_USERNAME);
|
||||
assertFalse(user.isAdmin());
|
||||
assertTrue(user.isDisabled());
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,85 +0,0 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.util;
|
||||
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.stargate.MiniClusterTestBase;
|
||||
import org.apache.hadoop.hbase.stargate.util.HTableTokenBucket;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class TestHTableTokenBucket extends MiniClusterTestBase {
|
||||
|
||||
static final String TABLE = "users";
|
||||
static final byte[] USER = Bytes.toBytes("user");
|
||||
static final byte[] NAME = Bytes.toBytes("name");
|
||||
static final byte[] TOKENS = Bytes.toBytes("tokens");
|
||||
static final byte[] TOKENS_RATE = Bytes.toBytes("tokens.rate");
|
||||
static final byte[] TOKENS_SIZE = Bytes.toBytes("tokens.size");
|
||||
static final String USER_TOKEN = "da4829144e3a2febd909a6e1b4ed7cfa";
|
||||
static final String USER_USERNAME = "testUser";
|
||||
static final double RATE = 1; // per second
|
||||
static final long SIZE = 10;
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
HBaseAdmin admin = new HBaseAdmin(conf);
|
||||
if (!admin.tableExists(TABLE)) {
|
||||
HTableDescriptor htd = new HTableDescriptor(TABLE);
|
||||
htd.addFamily(new HColumnDescriptor(USER));
|
||||
admin.createTable(htd);
|
||||
HTable table = new HTable(TABLE);
|
||||
Put put = new Put(Bytes.toBytes(USER_TOKEN));
|
||||
put.add(USER, NAME, Bytes.toBytes(USER_USERNAME));
|
||||
put.add(USER, TOKENS_RATE, Bytes.toBytes(RATE));
|
||||
put.add(USER, TOKENS_SIZE, Bytes.toBytes(SIZE));
|
||||
table.put(put);
|
||||
table.flushCommits();
|
||||
}
|
||||
}
|
||||
|
||||
public void testTokenBucketConfig() throws Exception {
|
||||
HTableTokenBucket tb = new HTableTokenBucket(conf, TABLE,
|
||||
Bytes.toBytes(USER_TOKEN));
|
||||
assertEquals(tb.getRate(), RATE);
|
||||
assertEquals(tb.getSize(), SIZE);
|
||||
}
|
||||
|
||||
public void testTokenBucket() throws Exception {
|
||||
HTableTokenBucket tb = new HTableTokenBucket(conf, TABLE,
|
||||
Bytes.toBytes(USER_TOKEN));
|
||||
int last = 0;
|
||||
for (int i = 0; i <= 5 && last < 10; i++) {
|
||||
int avail = tb.available();
|
||||
assertTrue("bucket did not increment", avail > last);
|
||||
assertTrue("bucket updated too quickly", avail - last < 3);
|
||||
last = avail;
|
||||
Thread.sleep(2000);
|
||||
}
|
||||
assertTrue("bucket did not fill", last >= 10);
|
||||
assertTrue("bucket overfilled", last == 10);
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,21 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
<artifactId>hbase-contrib</artifactId>
|
||||
<version>0.21.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hbase-contrib-stargate</artifactId>
|
||||
<packaging>pom</packaging>
|
||||
<name>HBase Contrib - Stargate</name>
|
||||
|
||||
<modules>
|
||||
<module>core</module>
|
||||
<module>war</module>
|
||||
</modules>
|
||||
|
||||
</project>
|
|
@@ -1,23 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
<artifactId>hbase-contrib-stargate</artifactId>
|
||||
<version>0.21.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hbase-contrib-stargate-war</artifactId>
|
||||
<packaging>war</packaging>
|
||||
<name>HBase Contrib - Stargate WAR</name>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>hbase-contrib-stargate-core</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
|
@@ -1,39 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE Configure PUBLIC "-//Mort Bay Consulting//DTD Configure//EN" "http://jetty.mortbay.org/configure.dtd">
|
||||
|
||||
<Configure id="Server" class="org.mortbay.jetty.Server">
|
||||
<Call name="addConnector">
|
||||
<Arg>
|
||||
<New class="org.mortbay.jetty.nio.SelectChannelConnector">
|
||||
<Set name="port">
|
||||
8080
|
||||
</Set>
|
||||
</New>
|
||||
</Arg>
|
||||
</Call>
|
||||
|
||||
<Set name="handler">
|
||||
<New id="Handlers" class="org.mortbay.jetty.handler.HandlerCollection">
|
||||
<Array type="org.mortbay.jetty.Handler">
|
||||
<Set name="handlers">
|
||||
<Item>
|
||||
<New id="Contexts" class="org.mortbay.jetty.handler.ContextHandlerCollection"/>
|
||||
</Item>
|
||||
</Set>
|
||||
</Array>
|
||||
</New>
|
||||
</Set>
|
||||
|
||||
<Call name="addLifeCycle">
|
||||
<Arg>
|
||||
<New class="org.mortbay.jetty.deployer.WebAppDeployer">
|
||||
<Set name="contexts">
|
||||
<Ref id="Contexts"/>
|
||||
</Set>
|
||||
<Set name="webAppDir">
|
||||
<SystemProperty name="jetty.home" default="."/>/webapps
|
||||
</Set>
|
||||
</New>
|
||||
</Arg>
|
||||
</Call>
|
||||
</Configure>
|
|
@@ -1,29 +0,0 @@
|
|||
<!DOCTYPE web-app PUBLIC
|
||||
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
|
||||
"http://java.sun.com/dtd/web-app_2_3.dtd">
|
||||
|
||||
<web-app>
|
||||
<display-name>
|
||||
Jersey RESTful test
|
||||
</display-name>
|
||||
|
||||
<servlet>
|
||||
<servlet-name>api</servlet-name>
|
||||
<servlet-class>org.apache.hadoop.hbase.stargate.RESTServlet</servlet-class>
|
||||
<load-on-startup>1</load-on-startup>
|
||||
|
||||
<init-param>
|
||||
<param-name>com.sun.jersey.config.property.packages</param-name>
|
||||
<param-value>org.apache.hadoop.hbase.stargate</param-value>
|
||||
</init-param>
|
||||
<init-param>
|
||||
<param-name>com.sun.jersey.config.property.resourceConfigClass</param-name>
|
||||
<param-value>com.sun.jersey.api.core.PackagesResourceConfig</param-value>
|
||||
</init-param>
|
||||
</servlet>
|
||||
|
||||
<servlet-mapping>
|
||||
<servlet-name>api</servlet-name>
|
||||
<url-pattern>/*</url-pattern>
|
||||
</servlet-mapping>
|
||||
</web-app>
|
core/pom.xml
|
@@ -151,11 +151,17 @@
|
|||
</build>
|
||||
|
||||
<properties>
|
||||
<commons-httpclient.version>3.1</commons-httpclient.version>
|
||||
<commons-lang.version>2.5</commons-lang.version>
|
||||
<commons-math.version>2.1</commons-math.version>
|
||||
<jasper.version>5.5.23</jasper.version>
|
||||
<jaxb-api.version>2.1</jaxb-api.version>
|
||||
<jersey.version>1.1.5.1</jersey.version>
|
||||
<jruby.version>1.4.0</jruby.version>
|
||||
<jsr311.version>1.1.1</jsr311.version>
|
||||
<protobuf.version>2.3.0</protobuf.version>
|
||||
<slf4j.version>1.5.8</slf4j.version>
|
||||
<stax-api>1.0.1</stax-api>
|
||||
<thrift.version>0.2.0</thrift.version>
|
||||
</properties>
|
||||
|
||||
|
@@ -164,6 +170,11 @@
|
|||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
<version>${commons-httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
|
@@ -261,6 +272,43 @@
|
|||
<version>${jruby.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- REST dependencies -->
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-core</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-json</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-server</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
<version>${jaxb-api.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.ws.rs</groupId>
|
||||
<artifactId>jsr311-api</artifactId>
|
||||
<version>${jsr311.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>stax</groupId>
|
||||
<artifactId>stax-api</artifactId>
|
||||
<version>1.0.1</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
|
|
|
@@ -18,24 +18,14 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
/**
|
||||
* Common constants for org.apache.hadoop.hbase.stargate
|
||||
* Common constants for org.apache.hadoop.hbase.rest
|
||||
*/
|
||||
public interface Constants {
|
||||
public static final String VERSION_STRING = "0.0.2";
|
||||
|
||||
public static final String AUTHENTICATOR_KEY = "stargate.authenticator";
|
||||
public static final String MULTIUSER_KEY = "stargate.multiuser";
|
||||
public static final String STATUS_REPORT_PERIOD_KEY =
|
||||
"stargate.status.period";
|
||||
|
||||
public static final String USERS_TABLE = "users";
|
||||
|
||||
public static final String INSTANCE_ZNODE_ROOT = "/stargate/instance";
|
||||
public static final String USERS_ZNODE_ROOT = "/stargate/users";
|
||||
|
||||
public static final int DEFAULT_MAX_AGE = 60 * 60 * 4; // 4 hours
|
||||
|
||||
public static final String MIMETYPE_TEXT = "text/plain";
|
|
@@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@@ -33,39 +33,34 @@ import javax.ws.rs.core.Response.ResponseBuilder;
|
|||
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
|
||||
public class ExistsResource implements Constants {
|
||||
public class ExistsResource extends ResourceBase {
|
||||
|
||||
User user;
|
||||
String tableName;
|
||||
String actualTableName;
|
||||
CacheControl cacheControl;
|
||||
RESTServlet servlet;
|
||||
|
||||
public ExistsResource(User user, String table) throws IOException {
|
||||
if (user != null) {
|
||||
this.user = user;
|
||||
this.actualTableName =
|
||||
!user.isAdmin() ? (user.getName() + "." + table) : table;
|
||||
} else {
|
||||
this.actualTableName = table;
|
||||
}
|
||||
this.tableName = table;
|
||||
servlet = RESTServlet.getInstance();
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
String tableName;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param table
|
||||
* @throws IOException
|
||||
*/
|
||||
public ExistsResource(String table) throws IOException {
|
||||
super();
|
||||
this.tableName = table;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
|
||||
MIMETYPE_BINARY})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
try {
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
if (!admin.tableExists(actualTableName)) {
|
||||
if (!admin.tableExists(tableName)) {
|
||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
|
@@ -75,5 +70,4 @@ public class ExistsResource implements Constants {
|
|||
response.cacheControl(cacheControl);
|
||||
return response.build();
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2009 The Apache Software Foundation
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
|
@@ -18,14 +18,13 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
|
||||
import java.net.InetAddress;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
|
||||
import org.mortbay.jetty.Server;
|
||||
import org.mortbay.jetty.servlet.Context;
|
||||
import org.mortbay.jetty.servlet.ServletHolder;
|
||||
|
@@ -33,8 +32,7 @@ import org.mortbay.jetty.servlet.ServletHolder;
|
|||
import com.sun.jersey.spi.container.servlet.ServletContainer;
|
||||
|
||||
/**
|
||||
* Main class for launching Stargate as a servlet hosted by an embedded Jetty
|
||||
* servlet container.
|
||||
* Main class for launching REST gateway as a servlet hosted by Jetty.
|
||||
* <p>
|
||||
* The following options are supported:
|
||||
* <ul>
|
||||
|
@@ -48,7 +46,6 @@ public class Main implements Constants {
|
|||
|
||||
Options options = new Options();
|
||||
options.addOption("p", "port", true, "service port");
|
||||
options.addOption("m", "multiuser", false, "enable multiuser mode");
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLine cmd = parser.parse(options, args);
|
||||
int port = 8080;
|
||||
|
@@ -65,19 +62,10 @@
|
|||
sh.setInitParameter("com.sun.jersey.config.property.packages",
|
||||
"jetty");
|
||||
|
||||
// configure the Stargate singleton
|
||||
// set up Jetty and run the embedded server
|
||||
|
||||
RESTServlet servlet = RESTServlet.getInstance();
|
||||
port = servlet.getConfiguration().getInt("stargate.port", port);
|
||||
if (!servlet.isMultiUser()) {
|
||||
servlet.setMultiUser(cmd.hasOption("m"));
|
||||
}
|
||||
servlet.addConnectorAddress(
|
||||
servlet.getConfiguration().get("stargate.hostname",
|
||||
InetAddress.getLocalHost().getCanonicalHostName()),
|
||||
port);
|
||||
|
||||
// set up Jetty and run the embedded server
|
||||
port = servlet.getConfiguration().getInt("hbase.rest.port", port);
|
||||
|
||||
Server server = new Server(port);
|
||||
server.setSendServerVersion(false);
|
|
@@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
@@ -0,0 +1,121 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HTableInterface;
|
||||
import org.apache.hadoop.hbase.client.HTablePool;
|
||||
import org.apache.hadoop.hbase.rest.metrics.RESTMetrics;
|
||||
|
||||
/**
|
||||
* Singleton class encapsulating global REST servlet state and functions.
|
||||
*/
|
||||
public class RESTServlet implements Constants {
|
||||
|
||||
private static RESTServlet instance;
|
||||
|
||||
Configuration conf;
|
||||
HTablePool pool;
|
||||
AtomicBoolean stopping = new AtomicBoolean(false);
|
||||
Map<String,Integer> maxAgeMap =
|
||||
Collections.synchronizedMap(new HashMap<String,Integer>());
|
||||
RESTMetrics metrics = new RESTMetrics();
|
||||
|
||||
/**
|
||||
* @return the RESTServlet singleton instance
|
||||
* @throws IOException
|
||||
*/
|
||||
public synchronized static RESTServlet getInstance() throws IOException {
|
||||
if (instance == null) {
|
||||
instance = new RESTServlet();
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public RESTServlet() throws IOException {
|
||||
this.conf = HBaseConfiguration.create();
|
||||
this.pool = new HTablePool(conf, 10);
|
||||
}
|
||||
|
||||
HTablePool getTablePool() {
|
||||
return pool;
|
||||
}
|
||||
|
||||
Configuration getConfiguration() {
|
||||
return conf;
|
||||
}
|
||||
|
||||
RESTMetrics getMetrics() {
|
||||
return metrics;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param tableName the table name
|
||||
* @return the maximum cache age suitable for use with this table, in
|
||||
* seconds
|
||||
* @throws IOException
|
||||
*/
|
||||
public int getMaxAge(String tableName) throws IOException {
|
||||
Integer i = maxAgeMap.get(tableName);
|
||||
if (i != null) {
|
||||
return i.intValue();
|
||||
}
|
||||
HTableInterface table = pool.getTable(tableName);
|
||||
try {
|
||||
int maxAge = DEFAULT_MAX_AGE;
|
||||
for (HColumnDescriptor family :
|
||||
table.getTableDescriptor().getFamilies()) {
|
||||
int ttl = family.getTimeToLive();
|
||||
if (ttl < 0) {
|
||||
continue;
|
||||
}
|
||||
if (ttl < maxAge) {
|
||||
maxAge = ttl;
|
||||
}
|
||||
}
|
||||
maxAgeMap.put(tableName, maxAge);
|
||||
return maxAge;
|
||||
} finally {
|
||||
pool.putTable(table);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Signal that a previously calculated maximum cache age has been
|
||||
* invalidated by a schema change.
|
||||
* @param tableName the table name
|
||||
*/
|
||||
public void invalidateMaxAge(String tableName) {
|
||||
maxAgeMap.remove(tableName);
|
||||
}
|
||||
}
|
|
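Note on the max-age logic above: getMaxAge() walks the table's column family descriptors and keeps the smallest non-negative TTL (bounded by DEFAULT_MAX_AGE), caching the result per table until invalidateMaxAge() is called after a schema change. Below is a minimal sketch, not part of this commit, of how a resource could apply that value to an HTTP response; the helper class and method names are illustrative.

import java.io.IOException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Response;
import org.apache.hadoop.hbase.rest.RESTServlet;

// Illustrative only: applies the servlet's computed max-age to a JAX-RS response.
public class CacheControlSketch {
  public static Response cached(Object entity, String tableName) throws IOException {
    RESTServlet servlet = RESTServlet.getInstance();
    CacheControl cc = new CacheControl();
    // Smallest column family TTL for the table, in seconds, capped at DEFAULT_MAX_AGE.
    cc.setMaxAge(servlet.getMaxAge(tableName));
    cc.setNoTransform(false);
    return Response.ok(entity).cacheControl(cc).build();
  }
}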
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetSocketAddress;
|
||||
|
@ -42,38 +42,30 @@ import org.apache.hadoop.hbase.TableNotFoundException;
|
|||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HTableInterface;
|
||||
import org.apache.hadoop.hbase.client.HTablePool;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
|
||||
import org.apache.hadoop.hbase.rest.model.TableInfoModel;
|
||||
import org.apache.hadoop.hbase.rest.model.TableRegionModel;
|
||||
|
||||
public class RegionsResource implements Constants {
|
||||
public class RegionsResource extends ResourceBase {
|
||||
private static final Log LOG = LogFactory.getLog(RegionsResource.class);
|
||||
|
||||
User user;
|
||||
String tableName;
|
||||
String actualTableName;
|
||||
CacheControl cacheControl;
|
||||
RESTServlet servlet;
|
||||
|
||||
public RegionsResource(User user, String table) throws IOException {
|
||||
if (user != null) {
|
||||
this.user = user;
|
||||
this.actualTableName =
|
||||
!user.isAdmin() ? (user.getName() + "." + table) : table;
|
||||
} else {
|
||||
this.actualTableName = table;
|
||||
}
|
||||
this.tableName = table;
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
servlet = RESTServlet.getInstance();
|
||||
}
|
||||
|
||||
String tableName;
|
||||
|
||||
public RegionsResource(String table) throws IOException {
|
||||
super();
|
||||
this.tableName = table;
|
||||
}
|
||||
|
||||
private Map<HRegionInfo,HServerAddress> getTableRegions()
|
||||
throws IOException {
|
||||
HTablePool pool = servlet.getTablePool();
|
||||
HTableInterface table = pool.getTable(actualTableName);
|
||||
HTableInterface table = pool.getTable(tableName);
|
||||
try {
|
||||
return ((HTable)table).getRegionsInfo();
|
||||
} finally {
|
||||
|
@ -83,32 +75,22 @@ public class RegionsResource implements Constants {
|
|||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
String name = user.isAdmin() ? actualTableName : tableName;
|
||||
TableInfoModel model = new TableInfoModel(name);
|
||||
TableInfoModel model = new TableInfoModel(tableName);
|
||||
Map<HRegionInfo,HServerAddress> regions = getTableRegions();
|
||||
for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
|
||||
HRegionInfo hri = e.getKey();
|
||||
if (user.isAdmin()) {
|
||||
HServerAddress addr = e.getValue();
|
||||
InetSocketAddress sa = addr.getInetSocketAddress();
|
||||
model.add(
|
||||
new TableRegionModel(name, hri.getRegionId(), hri.getStartKey(),
|
||||
hri.getEndKey(),
|
||||
sa.getHostName() + ":" + Integer.valueOf(sa.getPort())));
|
||||
} else {
|
||||
model.add(
|
||||
new TableRegionModel(name, hri.getRegionId(), hri.getStartKey(),
|
||||
hri.getEndKey()));
|
||||
}
|
||||
HServerAddress addr = e.getValue();
|
||||
InetSocketAddress sa = addr.getInetSocketAddress();
|
||||
model.add(
|
||||
new TableRegionModel(tableName, hri.getRegionId(),
|
||||
hri.getStartKey(), hri.getEndKey(),
|
||||
sa.getHostName() + ":" + Integer.valueOf(sa.getPort())));
|
||||
}
|
||||
ResponseBuilder response = Response.ok(model);
|
||||
response.cacheControl(cacheControl);
|
||||
|
@ -120,5 +102,4 @@ public class RegionsResource implements Constants {
|
|||
Response.Status.SERVICE_UNAVAILABLE);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -18,14 +18,15 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.auth;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
public class ResourceBase implements Constants {
|
||||
|
||||
public abstract class Authenticator {
|
||||
|
||||
public abstract User getUserForToken(String token) throws IOException;
|
||||
RESTServlet servlet;
|
||||
|
||||
public ResourceBase() throws IOException {
|
||||
servlet = RESTServlet.getInstance();
|
||||
}
|
||||
}
|
|
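ResourceBase replaces the old per-resource singleton lookups and per-user plumbing: each resource now calls super() and reads the shared RESTServlet from the inherited field. A hedged sketch of what a minimal resource built on the new base class might look like follows; the class name and "ping" path are hypothetical and not part of this commit.

package org.apache.hadoop.hbase.rest;   // sketch lives alongside the real resources

import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;

// Hypothetical resource showing the ResourceBase pattern introduced here.
@Path("ping")
public class PingResource extends ResourceBase {

  public PingResource() throws IOException {
    super();                                   // obtains the shared RESTServlet instance
  }

  @GET
  @Produces(MIMETYPE_TEXT)                     // constant inherited via Constants
  public Response get() {
    servlet.getMetrics().incrementRequests(1); // same request accounting the real resources do
    return Response.ok("pong").build();
  }
}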
@ -18,12 +18,12 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import com.sun.jersey.api.core.PackagesResourceConfig;
|
||||
|
||||
public class ResourceConfig extends PackagesResourceConfig {
|
||||
public ResourceConfig() {
|
||||
super("org.apache.hadoop.hbase.stargate");
|
||||
super("org.apache.hadoop.hbase.rest");
|
||||
}
|
||||
}
|
|
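ResourceConfig only needs its package argument changed because Jersey's PackagesResourceConfig discovers @Path-annotated classes by scanning that package. The server bootstrap is not shown in this commit; the sketch below is one conventional way such a package scan is wired into an embedded Jetty 6 / Jersey 1.x servlet container. The Jetty and Jersey APIs used are standard, but none of this wiring is taken from the diff, so treat it as an assumption.

import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import com.sun.jersey.spi.container.servlet.ServletContainer;

// Hedged bootstrap sketch, not part of this commit.
public class RestGatewayBootstrapSketch {
  public static void main(String[] args) throws Exception {
    ServletHolder jersey = new ServletHolder(ServletContainer.class);
    // Jersey scans this package for @Path resources, matching ResourceConfig above.
    jersey.setInitParameter("com.sun.jersey.config.property.packages",
        "org.apache.hadoop.hbase.rest");
    Server server = new Server(8080);
    Context context = new Context(server, "/", Context.SESSIONS);
    context.addServlet(jersey, "/*");
    server.start();
    server.join();
  }
}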
@ -18,14 +18,14 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.stargate.model.ScannerModel;
|
||||
import org.apache.hadoop.hbase.rest.model.ScannerModel;
|
||||
|
||||
public abstract class ResultGenerator implements Iterator<KeyValue> {
|
||||
|
||||
|
@ -45,5 +45,4 @@ public abstract class ResultGenerator implements Iterator<KeyValue> {
|
|||
public abstract void putBack(KeyValue kv);
|
||||
|
||||
public abstract void close();
|
||||
|
||||
}
|
|
@ -0,0 +1,106 @@
|
|||
/*
|
||||
* Copyright 2010 The Apache Software Foundation
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.CacheControl;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.Response;
|
||||
import javax.ws.rs.core.UriInfo;
|
||||
import javax.ws.rs.core.Response.ResponseBuilder;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.rest.model.TableListModel;
|
||||
import org.apache.hadoop.hbase.rest.model.TableModel;
|
||||
|
||||
@Path("/")
|
||||
public class RootResource extends ResourceBase {
|
||||
private static final Log LOG = LogFactory.getLog(RootResource.class);
|
||||
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public RootResource() throws IOException {
|
||||
super();
|
||||
}
|
||||
|
||||
private final TableListModel getTableList() throws IOException {
|
||||
TableListModel tableList = new TableListModel();
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
HTableDescriptor[] list = admin.listTables();
|
||||
for (HTableDescriptor htd: list) {
|
||||
tableList.add(new TableModel(htd.getNameAsString()));
|
||||
}
|
||||
return tableList;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
ResponseBuilder response = Response.ok(getTableList());
|
||||
response.cacheControl(cacheControl);
|
||||
return response.build();
|
||||
} catch (IOException e) {
|
||||
throw new WebApplicationException(e,
|
||||
Response.Status.SERVICE_UNAVAILABLE);
|
||||
}
|
||||
}
|
||||
|
||||
@Path("status/cluster")
|
||||
public StorageClusterStatusResource getClusterStatusResource()
|
||||
throws IOException {
|
||||
return new StorageClusterStatusResource();
|
||||
}
|
||||
|
||||
@Path("version")
|
||||
public VersionResource getVersionResource() throws IOException {
|
||||
return new VersionResource();
|
||||
}
|
||||
|
||||
@Path("{table}")
|
||||
public TableResource getTableResource(
|
||||
final @PathParam("table") String table) throws IOException {
|
||||
return new TableResource(table);
|
||||
}
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URLDecoder;
|
||||
|
@ -47,56 +47,49 @@ import org.apache.hadoop.hbase.client.HTableInterface;
|
|||
import org.apache.hadoop.hbase.client.HTablePool;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.RowModel;
|
||||
import org.apache.hadoop.hbase.rest.model.CellModel;
|
||||
import org.apache.hadoop.hbase.rest.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.rest.model.RowModel;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class RowResource implements Constants {
|
||||
public class RowResource extends ResourceBase {
|
||||
private static final Log LOG = LogFactory.getLog(RowResource.class);
|
||||
|
||||
User user;
|
||||
String tableName;
|
||||
String actualTableName;
|
||||
RowSpec rowspec;
|
||||
CacheControl cacheControl;
|
||||
RESTServlet servlet;
|
||||
|
||||
public RowResource(User user, String table, String rowspec, String versions)
|
||||
/**
|
||||
* Constructor
|
||||
* @param table
|
||||
* @param rowspec
|
||||
* @param versions
|
||||
* @throws IOException
|
||||
*/
|
||||
public RowResource(String table, String rowspec, String versions)
|
||||
throws IOException {
|
||||
this.user = user;
|
||||
if (user != null) {
|
||||
this.actualTableName =
|
||||
!user.isAdmin() ? user.getName() + "." + table : table;
|
||||
} else {
|
||||
this.actualTableName = table;
|
||||
}
|
||||
super();
|
||||
this.tableName = table;
|
||||
this.rowspec = new RowSpec(URLDecoder.decode(rowspec,
|
||||
HConstants.UTF8_ENCODING));
|
||||
if (versions != null) {
|
||||
this.rowspec.setMaxVersions(Integer.valueOf(versions));
|
||||
}
|
||||
this.servlet = RESTServlet.getInstance();
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setMaxAge(servlet.getMaxAge(actualTableName));
|
||||
cacheControl.setNoTransform(false);
|
||||
this.cacheControl = new CacheControl();
|
||||
this.cacheControl.setMaxAge(servlet.getMaxAge(tableName));
|
||||
this.cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
ResultGenerator generator =
|
||||
ResultGenerator.fromRowSpec(actualTableName, rowspec, null);
|
||||
ResultGenerator.fromRowSpec(tableName, rowspec, null);
|
||||
if (!generator.hasNext()) {
|
||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||
}
|
||||
|
@ -131,14 +124,10 @@ public class RowResource implements Constants {
|
|||
|
||||
@GET
|
||||
@Produces(MIMETYPE_BINARY)
|
||||
public Response getBinary(final @Context UriInfo uriInfo)
|
||||
throws IOException {
|
||||
public Response getBinary(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
    // doesn't make sense to use a non-specific coordinate as this can only
|
||||
// return a single cell
|
||||
|
@ -147,7 +136,7 @@ public class RowResource implements Constants {
|
|||
}
|
||||
try {
|
||||
ResultGenerator generator =
|
||||
ResultGenerator.fromRowSpec(actualTableName, rowspec, null);
|
||||
ResultGenerator.fromRowSpec(tableName, rowspec, null);
|
||||
if (!generator.hasNext()) {
|
||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||
}
|
||||
|
@ -168,12 +157,7 @@ public class RowResource implements Constants {
|
|||
HTableInterface table = null;
|
||||
try {
|
||||
List<RowModel> rows = model.getRows();
|
||||
// the user request limit is a transaction limit, so we need to
|
||||
// account for updates by row
|
||||
if (user != null && !servlet.userRequestLimit(user, rows.size())) {
|
||||
throw new WebApplicationException(Response.status(509).build());
|
||||
}
|
||||
table = pool.getTable(actualTableName);
|
||||
table = pool.getTable(tableName);
|
||||
((HTable)table).setAutoFlush(false);
|
||||
for (RowModel row: rows) {
|
||||
byte[] key = row.getKey();
|
||||
|
@ -241,7 +225,7 @@ public class RowResource implements Constants {
|
|||
} else {
|
||||
put.add(parts[0], null, timestamp, message);
|
||||
}
|
||||
table = pool.getTable(actualTableName);
|
||||
table = pool.getTable(tableName);
|
||||
table.put(put);
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + put.toString());
|
||||
|
@ -260,66 +244,48 @@ public class RowResource implements Constants {
|
|||
@PUT
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response put(final CellSetModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
return update(model, true);
|
||||
}
|
||||
|
||||
@PUT
|
||||
@Consumes(MIMETYPE_BINARY)
|
||||
public Response putBinary(final byte[] message,
|
||||
final @Context UriInfo uriInfo, final @Context HttpHeaders headers)
|
||||
throws IOException
|
||||
{
|
||||
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
return updateBinary(message, headers, true);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response post(final CellSetModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("POST " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
return update(model, false);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Consumes(MIMETYPE_BINARY)
|
||||
public Response postBinary(final byte[] message,
|
||||
final @Context UriInfo uriInfo, final @Context HttpHeaders headers)
|
||||
throws IOException {
|
||||
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
return updateBinary(message, headers, false);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
public Response delete(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response delete(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("DELETE " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
Delete delete = null;
|
||||
if (rowspec.hasTimestamp())
|
||||
|
@ -346,7 +312,7 @@ public class RowResource implements Constants {
|
|||
HTablePool pool = servlet.getTablePool();
|
||||
HTableInterface table = null;
|
||||
try {
|
||||
table = pool.getTable(actualTableName);
|
||||
table = pool.getTable(tableName);
|
||||
table.delete(delete);
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("DELETE " + delete.toString());
|
||||
|
@ -361,5 +327,4 @@ public class RowResource implements Constants {
|
|||
}
|
||||
return Response.ok().build();
|
||||
}
|
||||
|
||||
}
|
|
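For reference, the CellSetModel consumed by the PUT/POST handlers above groups cells by row. The sketch below builds a one-row, one-cell body such as a client might send to /{table}/{row}; RowModel(byte[]) is visible in this diff, while the CellModel constructor and the add* methods are assumed to mirror the getters RowResource uses, so read this as a sketch rather than the definitive model API.

import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch: a minimal cell set body for a row PUT or POST.
public class CellSetSketch {
  public static CellSetModel oneCell() {
    CellSetModel cellSet = new CellSetModel();
    RowModel row = new RowModel(Bytes.toBytes("row1"));
    // Column is family:qualifier packed into a single byte[], as elsewhere in the REST models.
    row.addCell(new CellModel(Bytes.toBytes("info:name"), Bytes.toBytes("value")));
    cellSet.addRow(row);
    return cellSet;
  }
}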
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -108,5 +108,4 @@ public class RowResultGenerator extends ResultGenerator {
|
|||
public void remove() {
|
||||
throw new UnsupportedOperationException("remove not supported");
|
||||
}
|
||||
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.TreeSet;
|
||||
|
@ -377,5 +377,4 @@ public class RowSpec {
|
|||
result.append("}");
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -37,41 +37,38 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.RowModel;
|
||||
import org.apache.hadoop.hbase.rest.model.CellModel;
|
||||
import org.apache.hadoop.hbase.rest.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.rest.model.RowModel;
|
||||
import org.apache.hadoop.hbase.util.Base64;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import com.sun.jersey.core.util.Base64;
|
||||
|
||||
public class ScannerInstanceResource implements Constants {
|
||||
public class ScannerInstanceResource extends ResourceBase {
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog(ScannerInstanceResource.class);
|
||||
|
||||
User user;
|
||||
ResultGenerator generator;
|
||||
String id;
|
||||
int batch = 1;
|
||||
RESTServlet servlet;
|
||||
CacheControl cacheControl;
|
||||
|
||||
public ScannerInstanceResource(User user, String table, String id,
|
||||
ResultGenerator generator, int batch) throws IOException {
|
||||
this.user = user;
|
||||
this.id = id;
|
||||
this.generator = generator;
|
||||
this.batch = batch;
|
||||
servlet = RESTServlet.getInstance();
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
ResultGenerator generator;
|
||||
String id;
|
||||
int batch = 1;
|
||||
|
||||
public ScannerInstanceResource(String table, String id,
|
||||
ResultGenerator generator, int batch) throws IOException {
|
||||
this.id = id;
|
||||
this.generator = generator;
|
||||
this.batch = batch;
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo,
|
||||
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues)
|
||||
throws IOException {
|
||||
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
|
@ -106,12 +103,6 @@ public class ScannerInstanceResource implements Constants {
|
|||
rowModel = new RowModel(rowKey);
|
||||
}
|
||||
if (!Bytes.equals(value.getRow(), rowKey)) {
|
||||
// the user request limit is a transaction limit, so we need to
|
||||
// account for scanner.next()
|
||||
if (user != null && !servlet.userRequestLimit(user, 1)) {
|
||||
generator.putBack(value);
|
||||
break;
|
||||
}
|
||||
// if maxRows was given as a query param, stop if we would exceed the
|
||||
// specified number of rows
|
||||
if (maxRows > 0) {
|
||||
|
@ -150,9 +141,9 @@ public class ScannerInstanceResource implements Constants {
|
|||
}
|
||||
ResponseBuilder response = Response.ok(value.getValue());
|
||||
response.cacheControl(cacheControl);
|
||||
response.header("X-Row", Base64.encode(value.getRow()));
|
||||
response.header("X-Row", Base64.encodeBytes(value.getRow()));
|
||||
response.header("X-Column",
|
||||
Base64.encode(
|
||||
Base64.encodeBytes(
|
||||
KeyValue.makeColumn(value.getFamily(), value.getQualifier())));
|
||||
response.header("X-Timestamp", value.getTimestamp());
|
||||
return response.build();
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
|
@ -41,32 +41,25 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.rest.model.ScannerModel;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.model.ScannerModel;
|
||||
|
||||
public class ScannerResource implements Constants {
|
||||
public class ScannerResource extends ResourceBase {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(ScannerResource.class);
|
||||
|
||||
static final Map<String,ScannerInstanceResource> scanners =
|
||||
Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
|
||||
|
||||
User user;
|
||||
String tableName;
|
||||
String actualTableName;
|
||||
RESTServlet servlet;
|
||||
|
||||
public ScannerResource(User user, String table) throws IOException {
|
||||
if (user != null) {
|
||||
this.user = user;
|
||||
this.actualTableName =
|
||||
!user.isAdmin() ? user.getName() + "." + table : table;
|
||||
} else {
|
||||
this.actualTableName = table;
|
||||
}
|
||||
/**
|
||||
* Constructor
|
||||
* @param table
|
||||
* @throws IOException
|
||||
*/
|
||||
public ScannerResource(String table) throws IOException {
|
||||
super();
|
||||
this.tableName = table;
|
||||
servlet = RESTServlet.getInstance();
|
||||
}
|
||||
|
||||
static void delete(final String id) {
|
||||
|
@ -77,10 +70,7 @@ public class ScannerResource implements Constants {
|
|||
}
|
||||
|
||||
Response update(final ScannerModel model, final boolean replace,
|
||||
final UriInfo uriInfo) throws IOException {
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
final UriInfo uriInfo) {
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
byte[] endRow = model.hasEndRow() ? model.getEndRow() : null;
|
||||
RowSpec spec = new RowSpec(model.getStartRow(), endRow,
|
||||
|
@ -88,11 +78,10 @@ public class ScannerResource implements Constants {
|
|||
try {
|
||||
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
|
||||
ScannerResultGenerator gen =
|
||||
new ScannerResultGenerator(actualTableName, spec, filter);
|
||||
new ScannerResultGenerator(tableName, spec, filter);
|
||||
String id = gen.getID();
|
||||
ScannerInstanceResource instance =
|
||||
new ScannerInstanceResource(user, actualTableName, id, gen,
|
||||
model.getBatch());
|
||||
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
|
||||
scanners.put(id, instance);
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("new scanner: " + id);
|
||||
|
@ -111,7 +100,7 @@ public class ScannerResource implements Constants {
|
|||
@PUT
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response put(final ScannerModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
|
@ -121,7 +110,7 @@ public class ScannerResource implements Constants {
|
|||
@POST
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response post(final ScannerModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("POST " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
|
@ -137,5 +126,4 @@ public class ScannerResource implements Constants {
|
|||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
}
|
|
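The scanner flow above is: a PUT or POST of a ScannerModel builds a ScannerResultGenerator, the resulting ScannerInstanceResource is parked in the static scanners map under a generated id, and later requests against that id page through results in batches. A sketch of building such a model follows; only getStartRow/getEndRow/getBatch appear in this diff, so the matching setter names are an assumption.

import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch of the scanner definition a client would POST to /{table}/scanner.
public class ScannerModelSketch {
  public static ScannerModel rangeScan() {
    ScannerModel model = new ScannerModel();
    model.setStartRow(Bytes.toBytes("row-000"));  // assumed setter names
    model.setEndRow(Bytes.toBytes("row-999"));
    model.setBatch(100);                          // rows returned per fetch
    return model;
  }
}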
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.client.Result;
|
|||
import org.apache.hadoop.hbase.client.ResultScanner;
|
||||
import org.apache.hadoop.hbase.client.Scan;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.stargate.model.ScannerModel;
|
||||
import org.apache.hadoop.hbase.rest.model.ScannerModel;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
public class ScannerResultGenerator extends ResultGenerator {
|
||||
|
@ -175,5 +175,4 @@ public class ScannerResultGenerator extends ResultGenerator {
|
|||
public void remove() {
|
||||
throw new UnsupportedOperationException("remove not supported");
|
||||
}
|
||||
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -48,39 +48,36 @@ import org.apache.hadoop.hbase.TableNotFoundException;
|
|||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTableInterface;
|
||||
import org.apache.hadoop.hbase.client.HTablePool;
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
|
||||
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class SchemaResource implements Constants {
|
||||
public class SchemaResource extends ResourceBase {
|
||||
private static final Log LOG = LogFactory.getLog(SchemaResource.class);
|
||||
|
||||
User user;
|
||||
String tableName;
|
||||
String actualTableName;
|
||||
CacheControl cacheControl;
|
||||
RESTServlet servlet;
|
||||
|
||||
public SchemaResource(User user, String table) throws IOException {
|
||||
if (user != null) {
|
||||
this.user = user;
|
||||
this.actualTableName =
|
||||
!user.isAdmin() ? (user.getName() + "." + table) : table;
|
||||
} else {
|
||||
this.actualTableName = table;
|
||||
}
|
||||
this.tableName = table;
|
||||
servlet = RESTServlet.getInstance();
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
String tableName;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param table
|
||||
* @throws IOException
|
||||
*/
|
||||
public SchemaResource(String table) throws IOException {
|
||||
super();
|
||||
this.tableName = table;
|
||||
}
|
||||
|
||||
private HTableDescriptor getTableSchema() throws IOException,
|
||||
TableNotFoundException {
|
||||
HTablePool pool = servlet.getTablePool();
|
||||
HTableInterface table = pool.getTable(actualTableName);
|
||||
HTableInterface table = pool.getTable(tableName);
|
||||
try {
|
||||
return table.getTableDescriptor();
|
||||
} finally {
|
||||
|
@ -90,13 +87,10 @@ public class SchemaResource implements Constants {
|
|||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
ResponseBuilder response =
|
||||
|
@ -111,11 +105,10 @@ public class SchemaResource implements Constants {
|
|||
}
|
||||
}
|
||||
|
||||
private Response replace(final byte[] tableName,
|
||||
final TableSchemaModel model, final UriInfo uriInfo,
|
||||
final HBaseAdmin admin) {
|
||||
private Response replace(final byte[] name, final TableSchemaModel model,
|
||||
final UriInfo uriInfo, final HBaseAdmin admin) {
|
||||
try {
|
||||
HTableDescriptor htd = new HTableDescriptor(tableName);
|
||||
HTableDescriptor htd = new HTableDescriptor(name);
|
||||
for (Map.Entry<QName,Object> e: model.getAny().entrySet()) {
|
||||
htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
|
||||
}
|
||||
|
@ -126,10 +119,10 @@ public class SchemaResource implements Constants {
|
|||
}
|
||||
htd.addFamily(hcd);
|
||||
}
|
||||
if (admin.tableExists(tableName)) {
|
||||
admin.disableTable(tableName);
|
||||
admin.modifyTable(tableName, htd);
|
||||
admin.enableTable(tableName);
|
||||
if (admin.tableExists(name)) {
|
||||
admin.disableTable(name);
|
||||
admin.modifyTable(name, htd);
|
||||
admin.enableTable(name);
|
||||
} else try {
|
||||
admin.createTable(htd);
|
||||
} catch (TableExistsException e) {
|
||||
|
@ -143,11 +136,11 @@ public class SchemaResource implements Constants {
|
|||
}
|
||||
}
|
||||
|
||||
private Response update(final byte[] tableName,final TableSchemaModel model,
|
||||
private Response update(final byte[] name, final TableSchemaModel model,
|
||||
final UriInfo uriInfo, final HBaseAdmin admin) {
|
||||
try {
|
||||
HTableDescriptor htd = admin.getTableDescriptor(tableName);
|
||||
admin.disableTable(tableName);
|
||||
HTableDescriptor htd = admin.getTableDescriptor(name);
|
||||
admin.disableTable(name);
|
||||
try {
|
||||
for (ColumnSchemaModel family: model.getColumns()) {
|
||||
HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
|
||||
|
@ -155,7 +148,7 @@ public class SchemaResource implements Constants {
|
|||
hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
|
||||
}
|
||||
if (htd.hasFamily(hcd.getName())) {
|
||||
admin.modifyColumn(tableName, hcd.getName(), hcd);
|
||||
admin.modifyColumn(name, hcd.getName(), hcd);
|
||||
} else {
|
||||
admin.addColumn(model.getName(), hcd);
|
||||
}
|
||||
|
@ -177,12 +170,12 @@ public class SchemaResource implements Constants {
|
|||
final UriInfo uriInfo) {
|
||||
try {
|
||||
servlet.invalidateMaxAge(tableName);
|
||||
byte[] tableName = Bytes.toBytes(actualTableName);
|
||||
byte[] name = Bytes.toBytes(tableName);
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
if (replace || !admin.tableExists(tableName)) {
|
||||
return replace(tableName, model, uriInfo, admin);
|
||||
if (replace || !admin.tableExists(name)) {
|
||||
return replace(name, model, uriInfo, admin);
|
||||
} else {
|
||||
return update(tableName, model, uriInfo, admin);
|
||||
return update(name, model, uriInfo, admin);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new WebApplicationException(e,
|
||||
|
@ -193,13 +186,10 @@ public class SchemaResource implements Constants {
|
|||
@PUT
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response put(final TableSchemaModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
return update(model, true, uriInfo);
|
||||
}
|
||||
|
@ -207,31 +197,25 @@ public class SchemaResource implements Constants {
|
|||
@POST
|
||||
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response post(final TableSchemaModel model,
|
||||
final @Context UriInfo uriInfo) throws IOException {
|
||||
final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("PUT " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
return update(model, false, uriInfo);
|
||||
}
|
||||
|
||||
@DELETE
|
||||
public Response delete(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response delete(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("DELETE " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
return Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
boolean success = false;
|
||||
for (int i = 0; i < 10; i++) try {
|
||||
admin.disableTable(actualTableName);
|
||||
admin.disableTable(tableName);
|
||||
success = true;
|
||||
break;
|
||||
} catch (IOException e) {
|
||||
|
@ -239,7 +223,7 @@ public class SchemaResource implements Constants {
|
|||
if (!success) {
|
||||
throw new IOException("could not disable table");
|
||||
}
|
||||
admin.deleteTable(actualTableName);
|
||||
admin.deleteTable(tableName);
|
||||
return Response.ok().build();
|
||||
} catch (TableNotFoundException e) {
|
||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||
|
@ -248,5 +232,4 @@ public class SchemaResource implements Constants {
|
|||
Response.Status.SERVICE_UNAVAILABLE);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -38,33 +38,33 @@ import org.apache.hadoop.hbase.ClusterStatus;
|
|||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HServerLoad;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.stargate.model.StorageClusterStatusModel;
|
||||
import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
|
||||
|
||||
public class StorageClusterStatusResource implements Constants {
|
||||
public class StorageClusterStatusResource extends ResourceBase {
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog(StorageClusterStatusResource.class);
|
||||
|
||||
private User user;
|
||||
private CacheControl cacheControl;
|
||||
private RESTServlet servlet;
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
public StorageClusterStatusResource(User user) throws IOException {
|
||||
this.user = user;
|
||||
this.servlet = RESTServlet.getInstance();
|
||||
this.cacheControl = new CacheControl();
|
||||
this.cacheControl.setNoCache(true);
|
||||
this.cacheControl.setNoTransform(false);
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public StorageClusterStatusResource() throws IOException {
|
||||
super();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
|
||||
public Response get(final @Context UriInfo uriInfo) throws IOException {
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
if (!servlet.userRequestLimit(user, 1)) {
|
||||
Response.status(509).build();
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
try {
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -33,24 +33,29 @@ import javax.ws.rs.core.Response.ResponseBuilder;
|
|||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.stargate.model.StorageClusterVersionModel;
|
||||
|
||||
public class StorageClusterVersionResource implements Constants {
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
|
||||
|
||||
public class StorageClusterVersionResource extends ResourceBase {
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog(StorageClusterVersionResource.class);
|
||||
|
||||
private CacheControl cacheControl;
|
||||
private RESTServlet servlet;
|
||||
|
||||
public StorageClusterVersionResource() throws IOException {
|
||||
servlet = RESTServlet.getInstance();
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public StorageClusterVersionResource() throws IOException {
|
||||
super();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
|
||||
public Response get(final @Context UriInfo uriInfo) {
|
||||
|
@ -58,9 +63,8 @@ public class StorageClusterVersionResource implements Constants {
|
|||
LOG.debug("GET " + uriInfo.getAbsolutePath());
|
||||
}
|
||||
servlet.getMetrics().incrementRequests(1);
|
||||
Configuration conf = servlet.getConfiguration();
|
||||
try {
|
||||
HBaseAdmin admin = new HBaseAdmin(conf);
|
||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||
StorageClusterVersionModel model = new StorageClusterVersionModel();
|
||||
model.setVersion(admin.getClusterStatus().getHBaseVersion());
|
||||
ResponseBuilder response = Response.ok(model);
|
|
@ -18,58 +18,52 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.apache.hadoop.hbase.stargate.User;
|
||||
public class TableResource extends ResourceBase {
|
||||
|
||||
public class TableResource implements Constants {
|
||||
|
||||
User user;
|
||||
String table;
|
||||
|
||||
public TableResource(User user, String table) {
|
||||
this.user = user;
|
||||
/**
|
||||
* Constructor
|
||||
* @param table
|
||||
* @throws IOException
|
||||
*/
|
||||
public TableResource(String table) throws IOException {
|
||||
super();
|
||||
this.table = table;
|
||||
}
|
||||
|
||||
@Path("exists")
|
||||
public ExistsResource getExistsResource() throws IOException {
|
||||
return new ExistsResource(user, table);
|
||||
return new ExistsResource(table);
|
||||
}
|
||||
|
||||
@Path("regions")
|
||||
public RegionsResource getRegionsResource() throws IOException {
|
||||
return new RegionsResource(user, table);
|
||||
return new RegionsResource(table);
|
||||
}
|
||||
|
||||
@Path("scanner")
|
||||
public ScannerResource getScannerResource() throws IOException {
|
||||
return new ScannerResource(user, table);
|
||||
return new ScannerResource(table);
|
||||
}
|
||||
|
||||
@Path("schema")
|
||||
public SchemaResource getSchemaResource() throws IOException {
|
||||
return new SchemaResource(user, table);
|
||||
return new SchemaResource(table);
|
||||
}
|
||||
|
||||
@Path("{rowspec: .+}")
|
||||
public RowResource getRowResource(
|
||||
final @PathParam("rowspec") String rowspec,
|
||||
final @QueryParam("v") String versions) {
|
||||
try {
|
||||
return new RowResource(user, table, rowspec, versions);
|
||||
} catch (IOException e) {
|
||||
throw new WebApplicationException(e,
|
||||
Response.Status.INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
final @QueryParam("v") String versions) throws IOException {
|
||||
return new RowResource(table, rowspec, versions);
|
||||
}
|
||||
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate;
|
||||
package org.apache.hadoop.hbase.rest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -34,28 +34,35 @@ import javax.ws.rs.core.Response.ResponseBuilder;
|
|||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.stargate.model.VersionModel;
|
||||
|
||||
import org.apache.hadoop.hbase.rest.model.VersionModel;
|
||||
|
||||
/**
|
||||
* Implements Stargate software version reporting via
|
||||
* Implements REST software version reporting
|
||||
* <p>
|
||||
* <tt>/version/stargate</tt>
|
||||
* <tt>/version/rest</tt>
|
||||
* <p>
|
||||
* <tt>/version</tt> (alias for <tt>/version/stargate</tt>)
|
||||
* <tt>/version</tt> (alias for <tt>/version/rest</tt>)
|
||||
*/
|
||||
public class VersionResource implements Constants {
|
||||
public class VersionResource extends ResourceBase {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(VersionResource.class);
|
||||
|
||||
private CacheControl cacheControl;
|
||||
private RESTServlet servlet;
|
||||
|
||||
public VersionResource() throws IOException {
|
||||
servlet = RESTServlet.getInstance();
|
||||
static CacheControl cacheControl;
|
||||
static {
|
||||
cacheControl = new CacheControl();
|
||||
cacheControl.setNoCache(true);
|
||||
cacheControl.setNoTransform(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @throws IOException
|
||||
*/
|
||||
public VersionResource() throws IOException {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a response for a version request.
|
||||
* @param context servlet context
|
||||
|
@ -85,9 +92,9 @@ public class VersionResource implements Constants {
|
|||
}
|
||||
|
||||
/**
|
||||
* Dispatch <tt>/version/stargate</tt> to self.
|
||||
* Dispatch <tt>/version/rest</tt> to self.
|
||||
*/
|
||||
@Path("stargate")
|
||||
@Path("rest")
|
||||
public VersionResource getVersionResource() {
|
||||
return this;
|
||||
}
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.client;
|
||||
package org.apache.hadoop.hbase.rest.client;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -41,7 +41,7 @@ import org.apache.commons.logging.LogFactory;
|
|||
|
||||
/**
|
||||
* A wrapper around HttpClient which provides some useful function and
|
||||
* semantics for interacting with the Stargate REST gateway.
|
||||
* semantics for interacting with the REST gateway.
|
||||
*/
|
||||
public class Client {
|
||||
public static final Header[] EMPTY_HEADER_ARRAY = new Header[0];
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.client;
|
||||
package org.apache.hadoop.hbase.rest.client;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
|
@ -18,15 +18,15 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.client;
|
||||
package org.apache.hadoop.hbase.rest.client;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.stargate.Constants;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.rest.Constants;
|
||||
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
public class RemoteAdmin {
|
||||
|
@ -56,8 +56,8 @@ public class RemoteAdmin {
|
|||
this.client = client;
|
||||
this.conf = conf;
|
||||
this.accessToken = accessToken;
|
||||
this.maxRetries = conf.getInt("stargate.client.max.retries", 10);
|
||||
this.sleepTime = conf.getLong("stargate.client.sleep", 1000);
|
||||
this.maxRetries = conf.getInt("hbase.rest.client.max.retries", 10);
|
||||
this.sleepTime = conf.getLong("hbase.rest.client.sleep", 1000);
|
||||
}
|
||||
|
||||
/**
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.client;
|
||||
package org.apache.hadoop.hbase.rest.client;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -48,16 +48,16 @@ import org.apache.hadoop.hbase.client.ResultScanner;
|
|||
import org.apache.hadoop.hbase.client.RowLock;
|
||||
import org.apache.hadoop.hbase.client.Scan;
|
||||
import org.apache.hadoop.hbase.io.TimeRange;
|
||||
import org.apache.hadoop.hbase.stargate.Constants;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.RowModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.ScannerModel;
|
||||
import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.rest.Constants;
|
||||
import org.apache.hadoop.hbase.rest.model.CellModel;
|
||||
import org.apache.hadoop.hbase.rest.model.CellSetModel;
|
||||
import org.apache.hadoop.hbase.rest.model.RowModel;
|
||||
import org.apache.hadoop.hbase.rest.model.ScannerModel;
|
||||
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
/**
|
||||
* HTable interface to remote tables accessed via Stargate
|
||||
* HTable interface to remote tables accessed via REST gateway
|
||||
*/
|
||||
public class RemoteHTable implements HTableInterface {
|
||||
|
||||
|
@ -208,8 +208,8 @@ public class RemoteHTable implements HTableInterface {
|
|||
this.conf = conf;
|
||||
this.name = name;
|
||||
this.accessToken = accessToken;
|
||||
this.maxRetries = conf.getInt("stargate.client.max.retries", 10);
|
||||
this.sleepTime = conf.getLong("stargate.client.sleep", 1000);
|
||||
this.maxRetries = conf.getInt("hbase.rest.client.max.retries", 10);
|
||||
this.sleepTime = conf.getLong("hbase.rest.client.sleep", 1000);
|
||||
}
|
||||
|
||||
public byte[] getTableName() {
|
||||
|
@ -327,7 +327,7 @@ public class RemoteHTable implements HTableInterface {
|
|||
}
|
||||
|
||||
public void put(List<Put> puts) throws IOException {
|
||||
// this is a trick: Stargate accepts multiple rows in a cell set and
|
||||
// this is a trick: The gateway accepts multiple rows in a cell set and
|
||||
// ignores the row specification in the URI
|
||||
|
||||
// separate puts by row
|
|
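The client-side retry knobs are renamed along with everything else: stargate.client.max.retries and stargate.client.sleep become hbase.rest.client.max.retries and hbase.rest.client.sleep, read by both RemoteAdmin and RemoteHTable. A small sketch of setting them follows; the key names and defaults come from this diff, the rest is ordinary HBase configuration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

// Sketch: tune how persistently the remote client retries against the REST gateway.
public class RestClientConfigSketch {
  public static Configuration clientConf() {
    Configuration conf = HBaseConfiguration.create();
    conf.setInt("hbase.rest.client.max.retries", 10); // default per this commit
    conf.setLong("hbase.rest.client.sleep", 1000);    // ms between retries, default per this commit
    return conf;
  }
}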
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.client;
|
||||
package org.apache.hadoop.hbase.rest.client;
|
||||
|
||||
import org.apache.commons.httpclient.Header;
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.metrics;
|
||||
package org.apache.hadoop.hbase.rest.metrics;
|
||||
|
||||
import org.apache.hadoop.hbase.metrics.MetricsRate;
|
||||
|
||||
|
@ -29,28 +29,28 @@ import org.apache.hadoop.metrics.Updater;
|
|||
import org.apache.hadoop.metrics.jvm.JvmMetrics;
|
||||
import org.apache.hadoop.metrics.util.MetricsRegistry;
|
||||
|
||||
public class StargateMetrics implements Updater {
|
||||
public class RESTMetrics implements Updater {
|
||||
private final MetricsRecord metricsRecord;
|
||||
private final MetricsRegistry registry = new MetricsRegistry();
|
||||
private final StargateStatistics stargateStatistics;
|
||||
private final RESTStatistics restStatistics;
|
||||
|
||||
private MetricsRate requests = new MetricsRate("requests", registry);
|
||||
|
||||
public StargateMetrics() {
|
||||
MetricsContext context = MetricsUtil.getContext("stargate");
|
||||
metricsRecord = MetricsUtil.createRecord(context, "stargate");
|
||||
public RESTMetrics() {
|
||||
MetricsContext context = MetricsUtil.getContext("rest");
|
||||
metricsRecord = MetricsUtil.createRecord(context, "rest");
|
||||
String name = Thread.currentThread().getName();
|
||||
metricsRecord.setTag("Master", name);
|
||||
metricsRecord.setTag("REST", name);
|
||||
context.registerUpdater(this);
|
||||
JvmMetrics.init("Stargate", name);
|
||||
JvmMetrics.init("rest", name);
|
||||
// expose the MBean for metrics
|
||||
stargateStatistics = new StargateStatistics(registry);
|
||||
restStatistics = new RESTStatistics(registry);
|
||||
|
||||
}
|
||||
|
||||
public void shutdown() {
|
||||
if (stargateStatistics != null) {
|
||||
stargateStatistics.shutdown();
|
||||
if (restStatistics != null) {
|
||||
restStatistics.shutdown();
|
||||
}
|
||||
}
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.metrics;
|
||||
package org.apache.hadoop.hbase.rest.metrics;
|
||||
|
||||
import javax.management.ObjectName;
|
||||
|
||||
|
@ -27,13 +27,12 @@ import org.apache.hadoop.hbase.metrics.MetricsMBeanBase;
|
|||
import org.apache.hadoop.metrics.util.MBeanUtil;
|
||||
import org.apache.hadoop.metrics.util.MetricsRegistry;
|
||||
|
||||
public class StargateStatistics extends MetricsMBeanBase {
|
||||
public class RESTStatistics extends MetricsMBeanBase {
|
||||
private final ObjectName mbeanName;
|
||||
|
||||
public StargateStatistics(MetricsRegistry registry) {
|
||||
super(registry, "StargateStatistics");
|
||||
mbeanName = MBeanUtil.registerMBean("Stargate",
|
||||
"StargateStatistics", this);
|
||||
public RESTStatistics(MetricsRegistry registry) {
|
||||
super(registry, "restStatistics");
|
||||
mbeanName = MBeanUtil.registerMBean("rest", "restStatistics", this);
|
||||
}
|
||||
|
||||
public void shutdown() {
|
|
@ -18,20 +18,19 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.model;
|
||||
package org.apache.hadoop.hbase.rest.model;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Serializable;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAttribute;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
import javax.xml.bind.annotation.XmlType;
|
||||
import javax.xml.bind.annotation.XmlValue;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
|
||||
import org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell;
|
||||
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
|
||||
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
|
||||
|
@ -55,7 +54,6 @@ import com.google.protobuf.ByteString;
|
|||
* </pre>
|
||||
*/
|
||||
@XmlRootElement(name="Cell")
|
||||
@XmlType(propOrder={"column","timestamp"})
|
||||
public class CellModel implements ProtobufMessageHandler, Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.model;
|
||||
package org.apache.hadoop.hbase.rest.model;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Serializable;
|
||||
|
@ -29,9 +29,9 @@ import javax.xml.bind.annotation.XmlRootElement;
|
|||
import javax.xml.bind.annotation.XmlElement;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
|
||||
import org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell;
|
||||
import org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet;
|
||||
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
|
||||
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
|
||||
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.model;
|
||||
package org.apache.hadoop.hbase.rest.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
|
@ -27,7 +27,6 @@ import java.util.Map;
|
|||
import javax.xml.bind.annotation.XmlAnyAttribute;
|
||||
import javax.xml.bind.annotation.XmlAttribute;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
import javax.xml.bind.annotation.XmlType;
|
||||
import javax.xml.namespace.QName;
|
||||
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
|
@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.HConstants;
|
|||
* </pre>
|
||||
*/
|
||||
@XmlRootElement(name="ColumnSchema")
|
||||
@XmlType(propOrder = {"name"})
|
||||
public class ColumnSchemaModel implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
private static QName BLOCKCACHE = new QName(HColumnDescriptor.BLOCKCACHE);
|
|
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -29,7 +29,7 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;

/**
 * Representation of a row. A row is a related set of cells, grouped by common
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -30,8 +30,8 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.protobuf.ByteString;
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.Serializable;
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -28,10 +28,9 @@ import java.util.List;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;

import com.google.protobuf.ByteString;

@@ -49,7 +48,6 @@ import com.google.protobuf.ByteString;
 * </pre>
 */
@XmlRootElement(name="TableInfo")
@XmlType(propOrder = {"name","regions"})
public class TableInfoModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L;
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -28,8 +28,8 @@ import java.util.List;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;

/**
 * Simple representation of a list of table names.
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.Serializable;
@@ -18,13 +18,12 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.Serializable;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

import org.apache.hadoop.hbase.util.Bytes;

@@ -43,7 +42,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 * </pre>
 */
@XmlRootElement(name="Region")
@XmlType(propOrder = {"name","id","startKey","endKey","location"})
public class TableRegionModel implements Serializable {

private static final long serialVersionUID = 1L;
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -32,16 +32,15 @@ import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.util.Bytes;

/**

@@ -59,7 +58,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 * </pre>
 */
@XmlRootElement(name="TableSchema")
@XmlType(propOrder = {"name","columns"})
public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L;
private static final QName IS_META = new QName(HTableDescriptor.IS_META);
@@ -18,7 +18,7 @@
 * limitations under the License.
 */

package org.apache.hadoop.hbase.stargate.model;
package org.apache.hadoop.hbase.rest.model;

import java.io.IOException;
import java.io.Serializable;

@@ -27,17 +27,17 @@ import javax.servlet.ServletContext;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
import org.apache.hadoop.hbase.stargate.RESTServlet;
import org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.RESTServlet;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;

import com.sun.jersey.spi.container.servlet.ServletContainer;

/**
 * A representation of the collection of versions of the Stargate software
 * A representation of the collection of versions of the REST gateway software
 * components.
 * <ul>
 * <li>stargateVersion: Stargate revision</li>
 * <li>restVersion: REST gateway revision</li>
 * <li>jvmVersion: the JVM vendor and version information</li>
 * <li>osVersion: the OS type, version, and hardware architecture</li>
 * <li>serverVersion: the name and version of the servlet container</li>

@@ -49,7 +49,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {

private static final long serialVersionUID = 1L;

private String stargateVersion;
private String restVersion;
private String jvmVersion;
private String osVersion;
private String serverVersion;

@@ -65,7 +65,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
 * @param context the servlet context
 */
public VersionModel(ServletContext context) {
stargateVersion = RESTServlet.VERSION_STRING;
restVersion = RESTServlet.VERSION_STRING;
jvmVersion = System.getProperty("java.vm.vendor") + ' ' +
  System.getProperty("java.version") + '-' +
  System.getProperty("java.vm.version");

@@ -78,18 +78,18 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
}

/**
 * @return the Stargate version
 * @return the REST gateway version
 */
@XmlAttribute(name="Stargate")
public String getStargateVersion() {
return stargateVersion;
@XmlAttribute(name="REST")
public String getRESTVersion() {
return restVersion;
}

/**
 * @return the JVM vendor and version
 */
@XmlAttribute(name="JVM")
public String getJvmVersion() {
public String getJVMVersion() {
return jvmVersion;
}

@@ -97,7 +97,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
 * @return the OS name, version, and hardware architecture
 */
@XmlAttribute(name="OS")
public String getOsVersion() {
public String getOSVersion() {
return osVersion;
}

@@ -118,23 +118,23 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
}

/**
 * @param version the Stargate version string
 * @param version the REST gateway version string
 */
public void setStargateVersion(String version) {
this.stargateVersion = version;
public void setRESTVersion(String version) {
this.restVersion = version;
}

/**
 * @param version the OS version string
 */
public void setOsVersion(String version) {
public void setOSVersion(String version) {
this.osVersion = version;
}

/**
 * @param version the JVM version string
 */
public void setJvmVersion(String version) {
public void setJVMVersion(String version) {
this.jvmVersion = version;
}

@@ -158,8 +158,8 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Stargate ");
sb.append(stargateVersion);
sb.append("rest ");
sb.append(restVersion);
sb.append(" [JVM: ");
sb.append(jvmVersion);
sb.append("] [OS: ");

@@ -175,7 +175,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
@Override
public byte[] createProtobufOutput() {
Version.Builder builder = Version.newBuilder();
builder.setStargateVersion(stargateVersion);
builder.setRestVersion(restVersion);
builder.setJvmVersion(jvmVersion);
builder.setOsVersion(osVersion);
builder.setServerVersion(serverVersion);

@@ -188,8 +188,8 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
throws IOException {
Version.Builder builder = Version.newBuilder();
builder.mergeFrom(message);
if (builder.hasStargateVersion()) {
stargateVersion = builder.getStargateVersion();
if (builder.hasRestVersion()) {
restVersion = builder.getRestVersion();
}
if (builder.hasJvmVersion()) {
jvmVersion = builder.getJvmVersion();
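Once a gateway is running, the VersionModel above is what its version resource returns, marshalled by JAXB. The sketch below shows one way a client might read it; the localhost:8080 address and the /version path are assumptions about a typical standalone deployment rather than something this diff establishes, and the attribute names in the response are only suggested by the @XmlAttribute names shown above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class VersionCheck {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint: a REST gateway listening on localhost:8080.
    URL url = new URL("http://localhost:8080/version");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "text/xml");
    BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"));
    try {
      String line;
      while ((line = in.readLine()) != null) {
        // Expect something like <Version REST="..." JVM="..." OS="..." Server="..."/>
        System.out.println(line);
      }
    } finally {
      in.close();
      conn.disconnect();
    }
  }
}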
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: CellMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;
package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class CellMessage {
private CellMessage() {}
@ -27,12 +27,12 @@ public final class CellMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// optional bytes row = 1;
|
||||
|
@ -114,41 +114,41 @@ public final class CellMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@ -157,7 +157,7 @@ public final class CellMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -168,12 +168,12 @@ public final class CellMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -183,25 +183,25 @@ public final class CellMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -210,7 +210,7 @@ public final class CellMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -220,24 +220,24 @@ public final class CellMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@ -246,27 +246,27 @@ public final class CellMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDefaultInstance()) return this;
|
||||
if (other.hasRow()) {
|
||||
setRow(other.getRow());
|
||||
}
|
||||
|
@ -406,23 +406,23 @@ public final class CellMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Cell)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Cell(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Cell)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Cell)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@ -432,24 +432,24 @@ public final class CellMessage {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\021CellMessage.proto\0223org.apache.hadoop.h" +
|
||||
"base.stargate.protobuf.generated\"D\n\004Cell" +
|
||||
"\022\013\n\003row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimesta" +
|
||||
"mp\030\003 \001(\003\022\014\n\004data\030\004 \001(\014"
|
||||
"\n\021CellMessage.proto\022/org.apache.hadoop.h" +
|
||||
"base.rest.protobuf.generated\"D\n\004Cell\022\013\n\003" +
|
||||
"row\030\001 \001(\014\022\016\n\006column\030\002 \001(\014\022\021\n\ttimestamp\030\003" +
|
||||
" \001(\003\022\014\n\004data\030\004 \001(\014"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Cell_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
|
||||
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
|
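The generated CellMessage class above follows the stock protobuf 2.x builder pattern. As a hedged round-trip sketch, using the setters and the parseFrom(byte[]) overload visible in this diff (the row, column, and value contents are invented for illustration, and toByteArray() is the standard protobuf serialization method rather than something shown here):

import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;

import com.google.protobuf.ByteString;

public class CellRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a Cell message with the four fields declared in CellMessage.proto.
    Cell cell = Cell.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))
        .setColumn(ByteString.copyFromUtf8("info:name"))
        .setTimestamp(System.currentTimeMillis())
        .setData(ByteString.copyFromUtf8("value1"))
        .build();

    // Serialize and parse back, as a gateway client would over the wire.
    byte[] wire = cell.toByteArray();
    Cell parsed = Cell.parseFrom(wire);
    System.out.println(parsed.getTimestamp());
  }
}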
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: CellSetMessage.proto

package org.apache.hadoop.hbase.stargate.protobuf.generated;
package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class CellSetMessage {
private CellSetMessage() {}
@ -27,12 +27,12 @@ public final class CellSetMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static final class Row extends
|
||||
|
@ -54,12 +54,12 @@ public final class CellSetMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required bytes key = 1;
|
||||
|
@ -69,15 +69,15 @@ public final class CellSetMessage {
|
|||
public boolean hasKey() { return hasKey; }
|
||||
public com.google.protobuf.ByteString getKey() { return key_; }
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.Cell values = 2;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.Cell values = 2;
|
||||
public static final int VALUES_FIELD_NUMBER = 2;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell> values_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell> values_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell> getValuesList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell> getValuesList() {
|
||||
return values_;
|
||||
}
|
||||
public int getValuesCount() { return values_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell getValues(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getValues(int index) {
|
||||
return values_.get(index);
|
||||
}
|
||||
|
||||
|
@ -94,7 +94,7 @@ public final class CellSetMessage {
|
|||
if (hasKey()) {
|
||||
output.writeBytes(1, getKey());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell element : getValuesList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell element : getValuesList()) {
|
||||
output.writeMessage(2, element);
|
||||
}
|
||||
getUnknownFields().writeTo(output);
|
||||
|
@ -110,7 +110,7 @@ public final class CellSetMessage {
|
|||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeBytesSize(1, getKey());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell element : getValuesList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell element : getValuesList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(2, element);
|
||||
}
|
||||
|
@ -119,41 +119,41 @@ public final class CellSetMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@ -162,7 +162,7 @@ public final class CellSetMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -173,12 +173,12 @@ public final class CellSetMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -188,25 +188,25 @@ public final class CellSetMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -215,7 +215,7 @@ public final class CellSetMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -225,24 +225,24 @@ public final class CellSetMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@ -251,7 +251,7 @@ public final class CellSetMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
|
@ -260,28 +260,28 @@ public final class CellSetMessage {
|
|||
result.values_ =
|
||||
java.util.Collections.unmodifiableList(result.values_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.getDefaultInstance()) return this;
|
||||
if (other.hasKey()) {
|
||||
setKey(other.getKey());
|
||||
}
|
||||
if (!other.values_.isEmpty()) {
|
||||
if (result.values_.isEmpty()) {
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell>();
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell>();
|
||||
}
|
||||
result.values_.addAll(other.values_);
|
||||
}
|
||||
|
@ -315,7 +315,7 @@ public final class CellSetMessage {
|
|||
break;
|
||||
}
|
||||
case 18: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addValues(subBuilder.buildPartial());
|
||||
break;
|
||||
|
@ -346,48 +346,48 @@ public final class CellSetMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.Cell values = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell> getValuesList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.Cell values = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell> getValuesList() {
|
||||
return java.util.Collections.unmodifiableList(result.values_);
|
||||
}
|
||||
public int getValuesCount() {
|
||||
return result.getValuesCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell getValues(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getValues(int index) {
|
||||
return result.getValues(index);
|
||||
}
|
||||
public Builder setValues(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell value) {
|
||||
public Builder setValues(int index, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.values_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setValues(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.Builder builderForValue) {
|
||||
public Builder setValues(int index, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder builderForValue) {
|
||||
result.values_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addValues(org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell value) {
|
||||
public Builder addValues(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.values_.isEmpty()) {
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell>();
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell>();
|
||||
}
|
||||
result.values_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addValues(org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell.Builder builderForValue) {
|
||||
public Builder addValues(org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder builderForValue) {
|
||||
if (result.values_.isEmpty()) {
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell>();
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell>();
|
||||
}
|
||||
result.values_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllValues(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell> values) {
|
||||
if (result.values_.isEmpty()) {
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.Cell>();
|
||||
result.values_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell>();
|
||||
}
|
||||
super.addAll(values, result.values_);
|
||||
return this;
|
||||
|
@ -397,34 +397,34 @@ public final class CellSetMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Row(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row)
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row rows = 1;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row rows = 1;
|
||||
public static final int ROWS_FIELD_NUMBER = 1;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row> rows_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> rows_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row> getRowsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> getRowsList() {
|
||||
return rows_;
|
||||
}
|
||||
public int getRowsCount() { return rows_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index) {
|
||||
return rows_.get(index);
|
||||
}
|
||||
|
||||
private void initFields() {
|
||||
}
|
||||
public final boolean isInitialized() {
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
|
@ -433,7 +433,7 @@ public final class CellSetMessage {
|
|||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
output.writeMessage(1, element);
|
||||
}
|
||||
getUnknownFields().writeTo(output);
|
||||
|
@ -445,7 +445,7 @@ public final class CellSetMessage {
|
|||
if (size != -1) return size;
|
||||
|
||||
size = 0;
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row element : getRowsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(1, element);
|
||||
}
|
||||
|
@ -454,41 +454,41 @@ public final class CellSetMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@ -497,7 +497,7 @@ public final class CellSetMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -508,12 +508,12 @@ public final class CellSetMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -523,25 +523,25 @@ public final class CellSetMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -550,7 +550,7 @@ public final class CellSetMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -560,24 +560,24 @@ public final class CellSetMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -586,7 +586,7 @@ public final class CellSetMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
@@ -595,25 +595,25 @@ public final class CellSetMessage {
result.rows_ =
|
||||
java.util.Collections.unmodifiableList(result.rows_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.getDefaultInstance()) return this;
|
||||
if (!other.rows_.isEmpty()) {
|
||||
if (result.rows_.isEmpty()) {
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
}
|
||||
result.rows_.addAll(other.rows_);
|
||||
}
|
||||
@@ -643,7 +643,7 @@ public final class CellSetMessage {
break;
|
||||
}
|
||||
case 10: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addRows(subBuilder.buildPartial());
|
||||
break;
|
||||
@@ -653,48 +653,48 @@ public final class CellSetMessage {
}
|
||||
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet.Row rows = 1;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row> getRowsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row rows = 1;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> getRowsList() {
|
||||
return java.util.Collections.unmodifiableList(result.rows_);
|
||||
}
|
||||
public int getRowsCount() {
|
||||
return result.getRowsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index) {
|
||||
return result.getRows(index);
|
||||
}
|
||||
public Builder setRows(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row value) {
|
||||
public Builder setRows(int index, org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.rows_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setRows(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.Builder builderForValue) {
|
||||
public Builder setRows(int index, org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.Builder builderForValue) {
|
||||
result.rows_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addRows(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row value) {
|
||||
public Builder addRows(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.rows_.isEmpty()) {
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
}
|
||||
result.rows_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addRows(org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.Builder builderForValue) {
|
||||
public Builder addRows(org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.Builder builderForValue) {
|
||||
if (result.rows_.isEmpty()) {
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
}
|
||||
result.rows_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllRows(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> values) {
|
||||
if (result.rows_.isEmpty()) {
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
result.rows_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row>();
|
||||
}
|
||||
super.addAll(values, result.rows_);
|
||||
return this;
|
||||
@@ -704,28 +704,28 @@ public final class CellSetMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.CellSet)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new CellSet(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.CellSet)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.CellSet)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
@@ -735,43 +735,42 @@ public final class CellSetMessage {
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\024CellSetMessage.proto\0223org.apache.hadoo" +
|
||||
"p.hbase.stargate.protobuf.generated\032\021Cel" +
|
||||
"lMessage.proto\"\270\001\n\007CellSet\022N\n\004rows\030\001 \003(\013" +
|
||||
"2@.org.apache.hadoop.hbase.stargate.prot" +
|
||||
"obuf.generated.CellSet.Row\032]\n\003Row\022\013\n\003key" +
|
||||
"\030\001 \002(\014\022I\n\006values\030\002 \003(\01329.org.apache.hado" +
|
||||
"op.hbase.stargate.protobuf.generated.Cel" +
|
||||
"l"
|
||||
"\n\024CellSetMessage.proto\022/org.apache.hadoo" +
|
||||
"p.hbase.rest.protobuf.generated\032\021CellMes" +
|
||||
"sage.proto\"\260\001\n\007CellSet\022J\n\004rows\030\001 \003(\0132<.o" +
|
||||
"rg.apache.hadoop.hbase.rest.protobuf.gen" +
|
||||
"erated.CellSet.Row\032Y\n\003Row\022\013\n\003key\030\001 \002(\014\022E" +
|
||||
"\n\006values\030\002 \003(\01325.org.apache.hadoop.hbase" +
|
||||
".rest.protobuf.generated.Cell"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor,
|
||||
new java.lang.String[] { "Rows", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_CellSet_Row_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_Row_descriptor,
|
||||
new java.lang.String[] { "Key", "Values", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellSetMessage.CellSet.Row.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor
|
||||
.internalBuildGeneratedFileFrom(descriptorData,
|
||||
new com.google.protobuf.Descriptors.FileDescriptor[] {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.CellMessage.getDescriptor(),
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.getDescriptor(),
|
||||
}, assigner);
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: ColumnSchemaMessage.proto
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.protobuf.generated;
|
||||
package org.apache.hadoop.hbase.rest.protobuf.generated;
|
||||
|
||||
public final class ColumnSchemaMessage {
|
||||
private ColumnSchemaMessage() {}
|
||||
@@ -27,12 +27,12 @@ public final class ColumnSchemaMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static final class Attribute extends
|
||||
@@ -54,12 +54,12 @@ public final class ColumnSchemaMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required string name = 1;
|
||||
@@ -115,41 +115,41 @@ public final class ColumnSchemaMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
@@ -158,7 +158,7 @@ public final class ColumnSchemaMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -169,12 +169,12 @@ public final class ColumnSchemaMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -184,25 +184,25 @@ public final class ColumnSchemaMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -211,7 +211,7 @@ public final class ColumnSchemaMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -221,24 +221,24 @@ public final class ColumnSchemaMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -247,27 +247,27 @@ public final class ColumnSchemaMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
@@ -354,16 +354,16 @@ public final class ColumnSchemaMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Attribute(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute)
|
||||
}
|
||||
|
||||
// optional string name = 1;
|
||||
@@ -373,15 +373,15 @@ public final class ColumnSchemaMessage {
public boolean hasName() { return hasName; }
|
||||
public java.lang.String getName() { return name_; }
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute attrs = 2;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute attrs = 2;
|
||||
public static final int ATTRS_FIELD_NUMBER = 2;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> attrs_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> attrs_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
|
||||
return attrs_;
|
||||
}
|
||||
public int getAttrsCount() { return attrs_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
|
||||
return attrs_.get(index);
|
||||
}
|
||||
|
||||
@@ -409,7 +409,7 @@ public final class ColumnSchemaMessage {
private void initFields() {
|
||||
}
|
||||
public final boolean isInitialized() {
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
@@ -421,7 +421,7 @@ public final class ColumnSchemaMessage {
if (hasName()) {
|
||||
output.writeString(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
output.writeMessage(2, element);
|
||||
}
|
||||
if (hasTtl()) {
|
||||
@@ -446,7 +446,7 @@ public final class ColumnSchemaMessage {
size += com.google.protobuf.CodedOutputStream
|
||||
.computeStringSize(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(2, element);
|
||||
}
|
||||
@@ -467,41 +467,41 @@ public final class ColumnSchemaMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
@@ -510,7 +510,7 @@ public final class ColumnSchemaMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -521,12 +521,12 @@ public final class ColumnSchemaMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -536,25 +536,25 @@ public final class ColumnSchemaMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -563,7 +563,7 @@ public final class ColumnSchemaMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -573,24 +573,24 @@ public final class ColumnSchemaMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -599,7 +599,7 @@ public final class ColumnSchemaMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
@@ -608,28 +608,28 @@ public final class ColumnSchemaMessage {
result.attrs_ =
|
||||
java.util.Collections.unmodifiableList(result.attrs_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
if (!other.attrs_.isEmpty()) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.addAll(other.attrs_);
|
||||
}
|
||||
@@ -672,7 +672,7 @@ public final class ColumnSchemaMessage {
break;
|
||||
}
|
||||
case 18: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addAttrs(subBuilder.buildPartial());
|
||||
break;
|
@@ -715,48 +715,48 @@ public final class ColumnSchemaMessage {
|||
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema.Attribute attrs = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute attrs = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
|
||||
return java.util.Collections.unmodifiableList(result.attrs_);
|
||||
}
|
||||
public int getAttrsCount() {
|
||||
return result.getAttrsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
|
||||
return result.getAttrs(index);
|
||||
}
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.attrs_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
|
||||
result.attrs_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllAttrs(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> values) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
|
||||
}
|
||||
super.addAll(values, result.attrs_);
|
||||
return this;
|
||||
@@ -823,28 +823,28 @@ public final class ColumnSchemaMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new ColumnSchema(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
@@ -854,36 +854,36 @@ public final class ColumnSchemaMessage {
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\031ColumnSchemaMessage.proto\0223org.apache." +
|
||||
"hadoop.hbase.stargate.protobuf.generated" +
|
||||
"\"\331\001\n\014ColumnSchema\022\014\n\004name\030\001 \001(\t\022Z\n\005attrs" +
|
||||
"\030\002 \003(\0132K.org.apache.hadoop.hbase.stargat" +
|
||||
"e.protobuf.generated.ColumnSchema.Attrib" +
|
||||
"ute\022\013\n\003ttl\030\003 \001(\005\022\023\n\013maxVersions\030\004 \001(\005\022\023\n" +
|
||||
"\013compression\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030" +
|
||||
"\001 \002(\t\022\r\n\005value\030\002 \002(\t"
|
||||
"\n\031ColumnSchemaMessage.proto\022/org.apache." +
|
||||
"hadoop.hbase.rest.protobuf.generated\"\325\001\n" +
|
||||
"\014ColumnSchema\022\014\n\004name\030\001 \001(\t\022V\n\005attrs\030\002 \003" +
|
||||
"(\0132G.org.apache.hadoop.hbase.rest.protob" +
|
||||
"uf.generated.ColumnSchema.Attribute\022\013\n\003t" +
|
||||
"tl\030\003 \001(\005\022\023\n\013maxVersions\030\004 \001(\005\022\023\n\013compres" +
|
||||
"sion\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n" +
|
||||
"\005value\030\002 \002(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor,
|
||||
new java.lang.String[] { "Name", "Attrs", "Ttl", "MaxVersions", "Compression", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_ColumnSchema_Attribute_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor,
|
||||
new java.lang.String[] { "Name", "Value", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: ScannerMessage.proto
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.protobuf.generated;
|
||||
package org.apache.hadoop.hbase.rest.protobuf.generated;
|
||||
|
||||
public final class ScannerMessage {
|
||||
private ScannerMessage() {}
|
||||
@@ -27,12 +27,12 @@ public final class ScannerMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// optional bytes startRow = 1;
|
||||
@@ -180,41 +180,41 @@ public final class ScannerMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
@@ -223,7 +223,7 @@ public final class ScannerMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -234,12 +234,12 @@ public final class ScannerMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -249,25 +249,25 @@ public final class ScannerMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -276,7 +276,7 @@ public final class ScannerMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner();
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -286,24 +286,24 @@ public final class ScannerMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -312,7 +312,7 @@ public final class ScannerMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
@@ -321,22 +321,22 @@ public final class ScannerMessage {
result.columns_ =
|
||||
java.util.Collections.unmodifiableList(result.columns_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance()) return this;
|
||||
if (other.hasStartRow()) {
|
||||
setStartRow(other.getStartRow());
|
||||
}
|
||||
@@ -601,23 +601,23 @@ public final class ScannerMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Scanner)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Scanner)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Scanner(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Scanner)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Scanner)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
@@ -627,26 +627,26 @@ public final class ScannerMessage {
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\024ScannerMessage.proto\0223org.apache.hadoo" +
|
||||
"p.hbase.stargate.protobuf.generated\"\224\001\n\007" +
|
||||
"Scanner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(" +
|
||||
"\014\022\017\n\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tsta" +
|
||||
"rtTime\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVers" +
|
||||
"ions\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t"
|
||||
"\n\024ScannerMessage.proto\022/org.apache.hadoo" +
|
||||
"p.hbase.rest.protobuf.generated\"\224\001\n\007Scan" +
|
||||
"ner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014\022\017\n" +
|
||||
"\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstartTi" +
|
||||
"me\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVersions" +
|
||||
"\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Scanner_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor,
|
||||
new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ScannerMessage.Scanner.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: StorageClusterStatusMessage.proto
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.protobuf.generated;
|
||||
package org.apache.hadoop.hbase.rest.protobuf.generated;
|
||||
|
||||
public final class StorageClusterStatusMessage {
|
||||
private StorageClusterStatusMessage() {}
|
||||
@@ -27,12 +27,12 @@ public final class StorageClusterStatusMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static final class Region extends
|
||||
@@ -54,12 +54,12 @@ public final class StorageClusterStatusMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required bytes name = 1;
|
||||
@@ -170,41 +170,41 @@ public final class StorageClusterStatusMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
@@ -213,7 +213,7 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -224,12 +224,12 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -239,25 +239,25 @@ public final class StorageClusterStatusMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -266,7 +266,7 @@ public final class StorageClusterStatusMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region();
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -276,24 +276,24 @@ public final class StorageClusterStatusMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -302,27 +302,27 @@ public final class StorageClusterStatusMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
@@ -506,16 +506,16 @@ public final class StorageClusterStatusMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Region(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region)
|
||||
}
|
||||
|
||||
public static final class Node extends
|
||||
@@ -537,12 +537,12 @@ public final class StorageClusterStatusMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required string name = 1;
|
||||
@@ -580,15 +580,15 @@ public final class StorageClusterStatusMessage {
public boolean hasMaxHeapSizeMB() { return hasMaxHeapSizeMB; }
|
||||
public int getMaxHeapSizeMB() { return maxHeapSizeMB_; }
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region regions = 6;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;
|
||||
public static final int REGIONS_FIELD_NUMBER = 6;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> regions_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> regions_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> getRegionsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> getRegionsList() {
|
||||
return regions_;
|
||||
}
|
||||
public int getRegionsCount() { return regions_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index) {
|
||||
return regions_.get(index);
|
||||
}
|
||||
|
||||
@@ -596,7 +596,7 @@ public final class StorageClusterStatusMessage {
}
|
||||
public final boolean isInitialized() {
|
||||
if (!hasName) return false;
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
@@ -620,7 +620,7 @@ public final class StorageClusterStatusMessage {
if (hasMaxHeapSizeMB()) {
|
||||
output.writeInt32(5, getMaxHeapSizeMB());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
output.writeMessage(6, element);
|
||||
}
|
||||
getUnknownFields().writeTo(output);
|
||||
@@ -652,7 +652,7 @@ public final class StorageClusterStatusMessage {
size += com.google.protobuf.CodedOutputStream
|
||||
.computeInt32Size(5, getMaxHeapSizeMB());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region element : getRegionsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(6, element);
|
||||
}
|
||||
@@ -661,41 +661,41 @@ public final class StorageClusterStatusMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
@@ -704,7 +704,7 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -715,12 +715,12 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
@@ -730,25 +730,25 @@ public final class StorageClusterStatusMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -757,7 +757,7 @@ public final class StorageClusterStatusMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node();
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -767,24 +767,24 @@ public final class StorageClusterStatusMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
@@ -793,7 +793,7 @@ public final class StorageClusterStatusMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
@@ -802,22 +802,22 @@ public final class StorageClusterStatusMessage {
result.regions_ =
|
||||
java.util.Collections.unmodifiableList(result.regions_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
@@ -835,7 +835,7 @@ public final class StorageClusterStatusMessage {
}
|
||||
if (!other.regions_.isEmpty()) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
}
|
||||
result.regions_.addAll(other.regions_);
|
||||
}
|
||||
@@ -885,7 +885,7 @@ public final class StorageClusterStatusMessage {
break;
|
||||
}
|
||||
case 50: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addRegions(subBuilder.buildPartial());
|
||||
break;
|
||||
@@ -988,48 +988,48 @@ public final class StorageClusterStatusMessage {
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Region regions = 6;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> getRegionsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Region regions = 6;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> getRegionsList() {
|
||||
return java.util.Collections.unmodifiableList(result.regions_);
|
||||
}
|
||||
public int getRegionsCount() {
|
||||
return result.getRegionsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region getRegions(int index) {
|
||||
return result.getRegions(index);
|
||||
}
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region value) {
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.regions_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder builderForValue) {
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder builderForValue) {
|
||||
result.regions_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addRegions(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region value) {
|
||||
public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
}
|
||||
result.regions_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addRegions(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder builderForValue) {
|
||||
public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder builderForValue) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
}
|
||||
result.regions_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllRegions(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region> values) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region>();
|
||||
}
|
||||
super.addAll(values, result.regions_);
|
||||
return this;
|
||||
@@ -1039,27 +1039,27 @@ public final class StorageClusterStatusMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Node(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node)
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
|
||||
public static final int LIVENODES_FIELD_NUMBER = 1;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> liveNodes_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> liveNodes_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> getLiveNodesList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> getLiveNodesList() {
|
||||
return liveNodes_;
|
||||
}
|
||||
public int getLiveNodesCount() { return liveNodes_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index) {
|
||||
return liveNodes_.get(index);
|
||||
}
|
||||
|
||||
@@ -1099,7 +1099,7 @@ public final class StorageClusterStatusMessage {
private void initFields() {
|
||||
}
|
||||
public final boolean isInitialized() {
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
@@ -1108,7 +1108,7 @@ public final class StorageClusterStatusMessage {
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
output.writeMessage(1, element);
|
||||
}
|
||||
for (java.lang.String element : getDeadNodesList()) {
|
||||
@@ -1132,7 +1132,7 @@ public final class StorageClusterStatusMessage {
if (size != -1) return size;
|
||||
|
||||
size = 0;
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node element : getLiveNodesList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(1, element);
|
||||
}
|
||||
@@ -1162,41 +1162,41 @@ public final class StorageClusterStatusMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -1205,7 +1205,7 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -1216,12 +1216,12 @@ public final class StorageClusterStatusMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -1231,25 +1231,25 @@ public final class StorageClusterStatusMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -1258,7 +1258,7 @@ public final class StorageClusterStatusMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -1268,24 +1268,24 @@ public final class StorageClusterStatusMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -1294,7 +1294,7 @@ public final class StorageClusterStatusMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
|
@@ -1307,25 +1307,25 @@ public final class StorageClusterStatusMessage {
result.deadNodes_ =
|
||||
java.util.Collections.unmodifiableList(result.deadNodes_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.getDefaultInstance()) return this;
|
||||
if (!other.liveNodes_.isEmpty()) {
|
||||
if (result.liveNodes_.isEmpty()) {
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
}
|
||||
result.liveNodes_.addAll(other.liveNodes_);
|
||||
}
|
||||
|
@@ -1370,7 +1370,7 @@ public final class StorageClusterStatusMessage {
break;
|
||||
}
|
||||
case 10: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addLiveNodes(subBuilder.buildPartial());
|
||||
break;
|
||||
|
@@ -1396,48 +1396,48 @@ public final class StorageClusterStatusMessage {
}
|
||||
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> getLiveNodesList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> getLiveNodesList() {
|
||||
return java.util.Collections.unmodifiableList(result.liveNodes_);
|
||||
}
|
||||
public int getLiveNodesCount() {
|
||||
return result.getLiveNodesCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index) {
|
||||
return result.getLiveNodes(index);
|
||||
}
|
||||
public Builder setLiveNodes(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node value) {
|
||||
public Builder setLiveNodes(int index, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.liveNodes_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setLiveNodes(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder builderForValue) {
|
||||
public Builder setLiveNodes(int index, org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder builderForValue) {
|
||||
result.liveNodes_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addLiveNodes(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node value) {
|
||||
public Builder addLiveNodes(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.liveNodes_.isEmpty()) {
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
}
|
||||
result.liveNodes_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addLiveNodes(org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder builderForValue) {
|
||||
public Builder addLiveNodes(org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder builderForValue) {
|
||||
if (result.liveNodes_.isEmpty()) {
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
}
|
||||
result.liveNodes_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllLiveNodes(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> values) {
|
||||
if (result.liveNodes_.isEmpty()) {
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
result.liveNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node>();
|
||||
}
|
||||
super.addAll(values, result.liveNodes_);
|
||||
return this;
|
||||
|
@@ -1541,33 +1541,33 @@ public final class StorageClusterStatusMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new StorageClusterStatus(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatus)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@@ -1577,52 +1577,52 @@ public final class StorageClusterStatusMessage {
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n!StorageClusterStatusMessage.proto\0223org" +
|
||||
".apache.hadoop.hbase.stargate.protobuf.g" +
|
||||
"enerated\"\232\004\n\024StorageClusterStatus\022a\n\tliv" +
|
||||
"eNodes\030\001 \003(\0132N.org.apache.hadoop.hbase.s" +
|
||||
"targate.protobuf.generated.StorageCluste" +
|
||||
"rStatus.Node\022\021\n\tdeadNodes\030\002 \003(\t\022\017\n\007regio" +
|
||||
"ns\030\003 \001(\005\022\020\n\010requests\030\004 \001(\005\022\023\n\013averageLoa" +
|
||||
"d\030\005 \001(\001\032\211\001\n\006Region\022\014\n\004name\030\001 \002(\014\022\016\n\006stor" +
|
||||
"es\030\002 \001(\005\022\022\n\nstorefiles\030\003 \001(\005\022\027\n\017storefil" +
|
||||
"eSizeMB\030\004 \001(\005\022\026\n\016memstoreSizeMB\030\005 \001(\005\022\034\n",
|
||||
"\024storefileIndexSizeMB\030\006 \001(\005\032\307\001\n\004Node\022\014\n\004" +
|
||||
"name\030\001 \002(\t\022\021\n\tstartCode\030\002 \001(\003\022\020\n\010request" +
|
||||
"s\030\003 \001(\005\022\022\n\nheapSizeMB\030\004 \001(\005\022\025\n\rmaxHeapSi" +
|
||||
"zeMB\030\005 \001(\005\022a\n\007regions\030\006 \003(\0132P.org.apache" +
|
||||
".hadoop.hbase.stargate.protobuf.generate" +
|
||||
"d.StorageClusterStatus.Region"
|
||||
"\n!StorageClusterStatusMessage.proto\022/org" +
|
||||
".apache.hadoop.hbase.rest.protobuf.gener" +
|
||||
"ated\"\222\004\n\024StorageClusterStatus\022]\n\tliveNod" +
|
||||
"es\030\001 \003(\0132J.org.apache.hadoop.hbase.rest." +
|
||||
"protobuf.generated.StorageClusterStatus." +
|
||||
"Node\022\021\n\tdeadNodes\030\002 \003(\t\022\017\n\007regions\030\003 \001(\005" +
|
||||
"\022\020\n\010requests\030\004 \001(\005\022\023\n\013averageLoad\030\005 \001(\001\032" +
|
||||
"\211\001\n\006Region\022\014\n\004name\030\001 \002(\014\022\016\n\006stores\030\002 \001(\005" +
|
||||
"\022\022\n\nstorefiles\030\003 \001(\005\022\027\n\017storefileSizeMB\030" +
|
||||
"\004 \001(\005\022\026\n\016memstoreSizeMB\030\005 \001(\005\022\034\n\024storefi",
|
||||
"leIndexSizeMB\030\006 \001(\005\032\303\001\n\004Node\022\014\n\004name\030\001 \002" +
|
||||
"(\t\022\021\n\tstartCode\030\002 \001(\003\022\020\n\010requests\030\003 \001(\005\022" +
|
||||
"\022\n\nheapSizeMB\030\004 \001(\005\022\025\n\rmaxHeapSizeMB\030\005 \001" +
|
||||
"(\005\022]\n\007regions\030\006 \003(\0132L.org.apache.hadoop." +
|
||||
"hbase.rest.protobuf.generated.StorageClu" +
|
||||
"sterStatus.Region"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor,
|
||||
new java.lang.String[] { "LiveNodes", "DeadNodes", "Regions", "Requests", "AverageLoad", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Region_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Region_descriptor,
|
||||
new java.lang.String[] { "Name", "Stores", "Storefiles", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(1);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Region.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_descriptor.getNestedTypes().get(1);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_StorageClusterStatus_Node_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_StorageClusterStatus_Node_descriptor,
|
||||
new java.lang.String[] { "Name", "StartCode", "Requests", "HeapSizeMB", "MaxHeapSizeMB", "Regions", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: TableInfoMessage.proto
|
||||
|
||||
package org.apache.hadoop.hbase.stargate.protobuf.generated;
|
||||
package org.apache.hadoop.hbase.rest.protobuf.generated;
|
||||
|
||||
public final class TableInfoMessage {
|
||||
private TableInfoMessage() {}
|
||||
|
@@ -27,12 +27,12 @@ public final class TableInfoMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static final class Region extends
|
||||
|
@@ -54,12 +54,12 @@ public final class TableInfoMessage {
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required string name = 1;
|
||||
|
@@ -156,41 +156,41 @@ public final class TableInfoMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -199,7 +199,7 @@ public final class TableInfoMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -210,12 +210,12 @@ public final class TableInfoMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -225,25 +225,25 @@ public final class TableInfoMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -252,7 +252,7 @@ public final class TableInfoMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -262,24 +262,24 @@ public final class TableInfoMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -288,27 +288,27 @@ public final class TableInfoMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
|
@@ -476,16 +476,16 @@ public final class TableInfoMessage {
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Region(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region)
|
||||
}
|
||||
|
||||
// required string name = 1;
|
||||
|
@@ -495,15 +495,15 @@ public final class TableInfoMessage {
public boolean hasName() { return hasName; }
|
||||
public java.lang.String getName() { return name_; }
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region regions = 2;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
|
||||
public static final int REGIONS_FIELD_NUMBER = 2;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region> regions_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> regions_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
|
||||
return regions_;
|
||||
}
|
||||
public int getRegionsCount() { return regions_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
|
||||
return regions_.get(index);
|
||||
}
|
||||
|
||||
|
@@ -511,7 +511,7 @@ public final class TableInfoMessage {
}
|
||||
public final boolean isInitialized() {
|
||||
if (!hasName) return false;
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
|
@@ -523,7 +523,7 @@ public final class TableInfoMessage {
if (hasName()) {
|
||||
output.writeString(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
output.writeMessage(2, element);
|
||||
}
|
||||
getUnknownFields().writeTo(output);
|
||||
|
@@ -539,7 +539,7 @@ public final class TableInfoMessage {
size += com.google.protobuf.CodedOutputStream
|
||||
.computeStringSize(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(2, element);
|
||||
}
|
||||
|
@@ -548,41 +548,41 @@ public final class TableInfoMessage {
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -591,7 +591,7 @@ public final class TableInfoMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -602,12 +602,12 @@ public final class TableInfoMessage {
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -617,25 +617,25 @@ public final class TableInfoMessage {
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -644,7 +644,7 @@ public final class TableInfoMessage {
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -654,24 +654,24 @@ public final class TableInfoMessage {
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -680,7 +680,7 @@ public final class TableInfoMessage {
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
|
@@ -689,28 +689,28 @@ public final class TableInfoMessage {
result.regions_ =
|
||||
java.util.Collections.unmodifiableList(result.regions_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
if (!other.regions_.isEmpty()) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
}
|
||||
result.regions_.addAll(other.regions_);
|
||||
}
|
||||
|
@@ -744,7 +744,7 @@ public final class TableInfoMessage {
break;
|
||||
}
|
||||
case 18: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addRegions(subBuilder.buildPartial());
|
||||
break;
|
||||
|
@@ -775,48 +775,48 @@ public final class TableInfoMessage {
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo.Region regions = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
|
||||
return java.util.Collections.unmodifiableList(result.regions_);
|
||||
}
|
||||
public int getRegionsCount() {
|
||||
return result.getRegionsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
|
||||
return result.getRegions(index);
|
||||
}
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.regions_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
|
||||
public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
|
||||
result.regions_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addRegions(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
|
||||
public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
}
|
||||
result.regions_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addRegions(org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
|
||||
public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
}
|
||||
result.regions_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllRegions(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> values) {
|
||||
if (result.regions_.isEmpty()) {
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
|
||||
}
|
||||
super.addAll(values, result.regions_);
|
||||
return this;
|
||||
|
@@ -826,28 +826,28 @@ public final class TableInfoMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new TableInfo(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfo)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@@ -857,36 +857,35 @@ public final class TableInfoMessage {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\026TableInfoMessage.proto\0223org.apache.had" +
|
||||
"oop.hbase.stargate.protobuf.generated\"\311\001" +
|
||||
"\n\tTableInfo\022\014\n\004name\030\001 \002(\t\022V\n\007regions\030\002 \003" +
|
||||
"(\0132E.org.apache.hadoop.hbase.stargate.pr" +
|
||||
"otobuf.generated.TableInfo.Region\032V\n\006Reg" +
|
||||
"ion\022\014\n\004name\030\001 \002(\t\022\020\n\010startKey\030\002 \001(\014\022\016\n\006e" +
|
||||
"ndKey\030\003 \001(\014\022\n\n\002id\030\004 \001(\003\022\020\n\010location\030\005 \001(" +
|
||||
"\t"
|
||||
"\n\026TableInfoMessage.proto\022/org.apache.had" +
|
||||
"oop.hbase.rest.protobuf.generated\"\305\001\n\tTa" +
|
||||
"bleInfo\022\014\n\004name\030\001 \002(\t\022R\n\007regions\030\002 \003(\0132A" +
|
||||
".org.apache.hadoop.hbase.rest.protobuf.g" +
|
||||
"enerated.TableInfo.Region\032V\n\006Region\022\014\n\004n" +
|
||||
"ame\030\001 \002(\t\022\020\n\010startKey\030\002 \001(\014\022\016\n\006endKey\030\003 " +
|
||||
"\001(\014\022\n\n\002id\030\004 \001(\003\022\020\n\010location\030\005 \001(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor,
|
||||
new java.lang.String[] { "Name", "Regions", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableInfo_Region_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor,
|
||||
new java.lang.String[] { "Name", "StartKey", "EndKey", "Id", "Location", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
@@ -1,7 +1,7 @@
 // Generated by the protocol buffer compiler. DO NOT EDIT!
 // source: TableListMessage.proto
 
-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;
 
 public final class TableListMessage {
 private TableListMessage() {}
@@ -27,12 +27,12 @@ public final class TableListMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// repeated string name = 1;
|
||||
|
@@ -82,41 +82,41 @@ public final class TableListMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -125,7 +125,7 @@ public final class TableListMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -136,12 +136,12 @@ public final class TableListMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -151,25 +151,25 @@ public final class TableListMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -178,7 +178,7 @@ public final class TableListMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -188,24 +188,24 @@ public final class TableListMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -214,7 +214,7 @@ public final class TableListMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
|
@@ -223,22 +223,22 @@ public final class TableListMessage {
|
|||
result.name_ =
|
||||
java.util.Collections.unmodifiableList(result.name_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance()) return this;
|
||||
if (!other.name_.isEmpty()) {
|
||||
if (result.name_.isEmpty()) {
|
||||
result.name_ = new java.util.ArrayList<java.lang.String>();
|
||||
|
@@ -319,23 +319,23 @@ public final class TableListMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableList)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new TableList(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableList)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@@ -345,23 +345,23 @@ public final class TableListMessage {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\026TableListMessage.proto\0223org.apache.had" +
|
||||
"oop.hbase.stargate.protobuf.generated\"\031\n" +
|
||||
"\tTableList\022\014\n\004name\030\001 \003(\t"
|
||||
"\n\026TableListMessage.proto\022/org.apache.had" +
|
||||
"oop.hbase.rest.protobuf.generated\"\031\n\tTab" +
|
||||
"leList\022\014\n\004name\030\001 \003(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableList_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
|
||||
new java.lang.String[] { "Name", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
@@ -1,7 +1,7 @@
 // Generated by the protocol buffer compiler. DO NOT EDIT!
 // source: TableSchemaMessage.proto
 
-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;
 
 public final class TableSchemaMessage {
 private TableSchemaMessage() {}
@@ -27,12 +27,12 @@ public final class TableSchemaMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static final class Attribute extends
|
||||
|
@@ -54,12 +54,12 @@ public final class TableSchemaMessage {
|
|||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_descriptor;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
|
||||
}
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
|
||||
}
|
||||
|
||||
// required string name = 1;
|
||||
|
@@ -115,41 +115,41 @@ public final class TableSchemaMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -158,7 +158,7 @@ public final class TableSchemaMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -169,12 +169,12 @@ public final class TableSchemaMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -184,25 +184,25 @@ public final class TableSchemaMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -211,7 +211,7 @@ public final class TableSchemaMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -221,24 +221,24 @@ public final class TableSchemaMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -247,27 +247,27 @@ public final class TableSchemaMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
|
@@ -354,16 +354,16 @@ public final class TableSchemaMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Attribute(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute)
|
||||
}
|
||||
|
||||
// optional string name = 1;
|
||||
|
@@ -373,27 +373,27 @@ public final class TableSchemaMessage {
|
|||
public boolean hasName() { return hasName; }
|
||||
public java.lang.String getName() { return name_; }
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute attrs = 2;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
|
||||
public static final int ATTRS_FIELD_NUMBER = 2;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> attrs_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> attrs_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
|
||||
return attrs_;
|
||||
}
|
||||
public int getAttrsCount() { return attrs_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
|
||||
return attrs_.get(index);
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema columns = 3;
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3;
|
||||
public static final int COLUMNS_FIELD_NUMBER = 3;
|
||||
private java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema> columns_ =
|
||||
private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> columns_ =
|
||||
java.util.Collections.emptyList();
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
|
||||
return columns_;
|
||||
}
|
||||
public int getColumnsCount() { return columns_.size(); }
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
|
||||
return columns_.get(index);
|
||||
}
|
||||
|
||||
|
@@ -414,10 +414,10 @@ public final class TableSchemaMessage {
|
|||
private void initFields() {
|
||||
}
|
||||
public final boolean isInitialized() {
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
if (!element.isInitialized()) return false;
|
||||
}
|
||||
return true;
|
||||
|
@@ -429,10 +429,10 @@ public final class TableSchemaMessage {
|
|||
if (hasName()) {
|
||||
output.writeString(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
output.writeMessage(2, element);
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
output.writeMessage(3, element);
|
||||
}
|
||||
if (hasInMemory()) {
|
||||
|
@@ -454,11 +454,11 @@ public final class TableSchemaMessage {
|
|||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeStringSize(1, getName());
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(2, element);
|
||||
}
|
||||
for (org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeMessageSize(3, element);
|
||||
}
|
||||
|
@@ -475,41 +475,41 @@ public final class TableSchemaMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@@ -518,7 +518,7 @@ public final class TableSchemaMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -529,12 +529,12 @@ public final class TableSchemaMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@@ -544,25 +544,25 @@ public final class TableSchemaMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@@ -571,7 +571,7 @@ public final class TableSchemaMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@@ -581,24 +581,24 @@ public final class TableSchemaMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@@ -607,7 +607,7 @@ public final class TableSchemaMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
|
@@ -620,34 +620,34 @@ public final class TableSchemaMessage {
|
|||
result.columns_ =
|
||||
java.util.Collections.unmodifiableList(result.columns_);
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance()) return this;
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance()) return this;
|
||||
if (other.hasName()) {
|
||||
setName(other.getName());
|
||||
}
|
||||
if (!other.attrs_.isEmpty()) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.addAll(other.attrs_);
|
||||
}
|
||||
if (!other.columns_.isEmpty()) {
|
||||
if (result.columns_.isEmpty()) {
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
}
|
||||
result.columns_.addAll(other.columns_);
|
||||
}
|
||||
|
@@ -687,13 +687,13 @@ public final class TableSchemaMessage {
|
|||
break;
|
||||
}
|
||||
case 18: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addAttrs(subBuilder.buildPartial());
|
||||
break;
|
||||
}
|
||||
case 26: {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder subBuilder = org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addColumns(subBuilder.buildPartial());
|
||||
break;
|
||||
|
@@ -732,48 +732,48 @@ public final class TableSchemaMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema.Attribute attrs = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
|
||||
return java.util.Collections.unmodifiableList(result.attrs_);
|
||||
}
|
||||
public int getAttrsCount() {
|
||||
return result.getAttrsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
|
||||
return result.getAttrs(index);
|
||||
}
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.attrs_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
|
||||
public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
|
||||
result.attrs_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
|
||||
public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
}
|
||||
result.attrs_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllAttrs(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> values) {
|
||||
if (result.attrs_.isEmpty()) {
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
|
||||
}
|
||||
super.addAll(values, result.attrs_);
|
||||
return this;
|
||||
|
@ -783,48 +783,48 @@ public final class TableSchemaMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// repeated .org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchema columns = 3;
|
||||
public java.util.List<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
|
||||
// repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3;
|
||||
public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
|
||||
return java.util.Collections.unmodifiableList(result.columns_);
|
||||
}
|
||||
public int getColumnsCount() {
|
||||
return result.getColumnsCount();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
|
||||
return result.getColumns(index);
|
||||
}
|
||||
public Builder setColumns(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
|
||||
public Builder setColumns(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.columns_.set(index, value);
|
||||
return this;
|
||||
}
|
||||
public Builder setColumns(int index, org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
|
||||
public Builder setColumns(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
|
||||
result.columns_.set(index, builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addColumns(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
|
||||
public Builder addColumns(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
if (result.columns_.isEmpty()) {
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
}
|
||||
result.columns_.add(value);
|
||||
return this;
|
||||
}
|
||||
public Builder addColumns(org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
|
||||
public Builder addColumns(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
|
||||
if (result.columns_.isEmpty()) {
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
}
|
||||
result.columns_.add(builderForValue.build());
|
||||
return this;
|
||||
}
|
||||
public Builder addAllColumns(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema> values) {
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> values) {
|
||||
if (result.columns_.isEmpty()) {
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
|
||||
}
|
||||
super.addAll(values, result.columns_);
|
||||
return this;
|
||||
|
@ -870,28 +870,28 @@ public final class TableSchemaMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new TableSchema(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchema)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable;
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@ -901,45 +901,45 @@ public final class TableSchemaMessage {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\030TableSchemaMessage.proto\0223org.apache.h" +
|
||||
"adoop.hbase.stargate.protobuf.generated\032" +
|
||||
"\031ColumnSchemaMessage.proto\"\230\002\n\013TableSche" +
|
||||
"ma\022\014\n\004name\030\001 \001(\t\022Y\n\005attrs\030\002 \003(\0132J.org.ap" +
|
||||
"ache.hadoop.hbase.stargate.protobuf.gene" +
|
||||
"rated.TableSchema.Attribute\022R\n\007columns\030\003" +
|
||||
" \003(\0132A.org.apache.hadoop.hbase.stargate." +
|
||||
"protobuf.generated.ColumnSchema\022\020\n\010inMem" +
|
||||
"ory\030\004 \001(\010\022\020\n\010readOnly\030\005 \001(\010\032(\n\tAttribute" +
|
||||
"\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t"
|
||||
"\n\030TableSchemaMessage.proto\022/org.apache.h" +
|
||||
"adoop.hbase.rest.protobuf.generated\032\031Col" +
|
||||
"umnSchemaMessage.proto\"\220\002\n\013TableSchema\022\014" +
|
||||
"\n\004name\030\001 \001(\t\022U\n\005attrs\030\002 \003(\0132F.org.apache" +
|
||||
".hadoop.hbase.rest.protobuf.generated.Ta" +
|
||||
"bleSchema.Attribute\022N\n\007columns\030\003 \003(\0132=.o" +
|
||||
"rg.apache.hadoop.hbase.rest.protobuf.gen" +
|
||||
"erated.ColumnSchema\022\020\n\010inMemory\030\004 \001(\010\022\020\n" +
|
||||
"\010readOnly\030\005 \001(\010\032(\n\tAttribute\022\014\n\004name\030\001 \002" +
|
||||
"(\t\022\r\n\005value\030\002 \002(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor,
|
||||
new java.lang.String[] { "Name", "Attrs", "Columns", "InMemory", "ReadOnly", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_fieldAccessorTable = new
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor.getNestedTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_TableSchema_Attribute_descriptor,
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor,
|
||||
new java.lang.String[] { "Name", "Value", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder.class);
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor
|
||||
.internalBuildGeneratedFileFrom(descriptorData,
|
||||
new com.google.protobuf.Descriptors.FileDescriptor[] {
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.getDescriptor(),
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.getDescriptor(),
|
||||
}, assigner);
|
||||
}
|
||||
|
|
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: VersionMessage.proto

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class VersionMessage {
private VersionMessage() {}
@@ -27,20 +27,20 @@ public final class VersionMessage {

public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
-return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_descriptor;
+return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
-return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_fieldAccessorTable;
+return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable;
}

-// optional string stargateVersion = 1;
-public static final int STARGATEVERSION_FIELD_NUMBER = 1;
-private boolean hasStargateVersion;
-private java.lang.String stargateVersion_ = "";
-public boolean hasStargateVersion() { return hasStargateVersion; }
-public java.lang.String getStargateVersion() { return stargateVersion_; }
+// optional string restVersion = 1;
+public static final int RESTVERSION_FIELD_NUMBER = 1;
+private boolean hasRestVersion;
+private java.lang.String restVersion_ = "";
+public boolean hasRestVersion() { return hasRestVersion; }
+public java.lang.String getRestVersion() { return restVersion_; }

// optional string jvmVersion = 2;
public static final int JVMVERSION_FIELD_NUMBER = 2;
@ -79,8 +79,8 @@ public final class VersionMessage {
|
|||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
if (hasStargateVersion()) {
|
||||
output.writeString(1, getStargateVersion());
|
||||
if (hasRestVersion()) {
|
||||
output.writeString(1, getRestVersion());
|
||||
}
|
||||
if (hasJvmVersion()) {
|
||||
output.writeString(2, getJvmVersion());
|
||||
|
@ -103,9 +103,9 @@ public final class VersionMessage {
|
|||
if (size != -1) return size;
|
||||
|
||||
size = 0;
|
||||
if (hasStargateVersion()) {
|
||||
if (hasRestVersion()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
.computeStringSize(1, getStargateVersion());
|
||||
.computeStringSize(1, getRestVersion());
|
||||
}
|
||||
if (hasJvmVersion()) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
|
@ -128,41 +128,41 @@ public final class VersionMessage {
|
|||
return size;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(byte[] data)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseDelimitedFrom(java.io.InputStream input)
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
|
@ -171,7 +171,7 @@ public final class VersionMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseDelimitedFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -182,12 +182,12 @@ public final class VersionMessage {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
public static org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
|
@ -197,25 +197,25 @@ public final class VersionMessage {
|
|||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version prototype) {
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder> {
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version result;
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version result;
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.newBuilder()
|
||||
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.newBuilder()
|
||||
private Builder() {}
|
||||
|
||||
private static Builder create() {
|
||||
Builder builder = new Builder();
|
||||
builder.result = new org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version();
|
||||
builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version internalGetResult() {
|
||||
protected org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version internalGetResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -224,7 +224,7 @@ public final class VersionMessage {
|
|||
throw new IllegalStateException(
|
||||
"Cannot call clear() after build().");
|
||||
}
|
||||
result = new org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version();
|
||||
result = new org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -234,24 +234,24 @@ public final class VersionMessage {
|
|||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.getDescriptor();
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.getDescriptor();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.getDefaultInstance();
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.getDefaultInstance();
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return result.isInitialized();
|
||||
}
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version build() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version build() {
|
||||
if (result != null && !isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return buildPartial();
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version buildParsed()
|
||||
private org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
if (!isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
|
@ -260,29 +260,29 @@ public final class VersionMessage {
|
|||
return buildPartial();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version buildPartial() {
|
||||
public org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version buildPartial() {
|
||||
if (result == null) {
|
||||
throw new IllegalStateException(
|
||||
"build() has already been called on this Builder.");
|
||||
}
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version returnMe = result;
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version returnMe = result;
|
||||
result = null;
|
||||
return returnMe;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version)other);
|
||||
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version other) {
|
||||
if (other == org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.getDefaultInstance()) return this;
|
||||
if (other.hasStargateVersion()) {
|
||||
setStargateVersion(other.getStargateVersion());
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version other) {
|
||||
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.getDefaultInstance()) return this;
|
||||
if (other.hasRestVersion()) {
|
||||
setRestVersion(other.getRestVersion());
|
||||
}
|
||||
if (other.hasJvmVersion()) {
|
||||
setJvmVersion(other.getJvmVersion());
|
||||
|
@ -322,7 +322,7 @@ public final class VersionMessage {
|
|||
break;
|
||||
}
|
||||
case 10: {
|
||||
setStargateVersion(input.readString());
|
||||
setRestVersion(input.readString());
|
||||
break;
|
||||
}
|
||||
case 18: {
|
||||
|
@ -346,24 +346,24 @@ public final class VersionMessage {
|
|||
}
|
||||
|
||||
|
||||
// optional string stargateVersion = 1;
|
||||
public boolean hasStargateVersion() {
|
||||
return result.hasStargateVersion();
|
||||
// optional string restVersion = 1;
|
||||
public boolean hasRestVersion() {
|
||||
return result.hasRestVersion();
|
||||
}
|
||||
public java.lang.String getStargateVersion() {
|
||||
return result.getStargateVersion();
|
||||
public java.lang.String getRestVersion() {
|
||||
return result.getRestVersion();
|
||||
}
|
||||
public Builder setStargateVersion(java.lang.String value) {
|
||||
public Builder setRestVersion(java.lang.String value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
result.hasStargateVersion = true;
|
||||
result.stargateVersion_ = value;
|
||||
result.hasRestVersion = true;
|
||||
result.restVersion_ = value;
|
||||
return this;
|
||||
}
|
||||
public Builder clearStargateVersion() {
|
||||
result.hasStargateVersion = false;
|
||||
result.stargateVersion_ = getDefaultInstance().getStargateVersion();
|
||||
public Builder clearRestVersion() {
|
||||
result.hasRestVersion = false;
|
||||
result.restVersion_ = getDefaultInstance().getRestVersion();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -451,23 +451,23 @@ public final class VersionMessage {
|
|||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Version)
|
||||
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Version)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new Version(true);
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.internalForceInit();
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.internalForceInit();
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.stargate.protobuf.generated.Version)
|
||||
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Version)
|
||||
}
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_descriptor;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_fieldAccessorTable;
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable;
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
|
@ -477,25 +477,25 @@ public final class VersionMessage {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\024VersionMessage.proto\0223org.apache.hadoo" +
|
||||
"p.hbase.stargate.protobuf.generated\"w\n\007V" +
|
||||
"ersion\022\027\n\017stargateVersion\030\001 \001(\t\022\022\n\njvmVe" +
|
||||
"rsion\030\002 \001(\t\022\021\n\tosVersion\030\003 \001(\t\022\025\n\rserver" +
|
||||
"Version\030\004 \001(\t\022\025\n\rjerseyVersion\030\005 \001(\t"
|
||||
"\n\024VersionMessage.proto\022/org.apache.hadoo" +
|
||||
"p.hbase.rest.protobuf.generated\"s\n\007Versi" +
|
||||
"on\022\023\n\013restVersion\030\001 \001(\t\022\022\n\njvmVersion\030\002 " +
|
||||
"\001(\t\022\021\n\tosVersion\030\003 \001(\t\022\025\n\rserverVersion\030" +
|
||||
"\004 \001(\t\022\025\n\rjerseyVersion\030\005 \001(\t"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
public com.google.protobuf.ExtensionRegistry assignDescriptors(
|
||||
com.google.protobuf.Descriptors.FileDescriptor root) {
|
||||
descriptor = root;
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_descriptor =
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor =
|
||||
getDescriptor().getMessageTypes().get(0);
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_fieldAccessorTable = new
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_org_apache_hadoop_hbase_stargate_protobuf_generated_Version_descriptor,
|
||||
new java.lang.String[] { "StargateVersion", "JvmVersion", "OsVersion", "ServerVersion", "JerseyVersion", },
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.class,
|
||||
org.apache.hadoop.hbase.stargate.protobuf.generated.VersionMessage.Version.Builder.class);
|
||||
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Version_descriptor,
|
||||
new java.lang.String[] { "RestVersion", "JvmVersion", "OsVersion", "ServerVersion", "JerseyVersion", },
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.class,
|
||||
org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
|
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate.provider;
+package org.apache.hadoop.hbase.rest.provider;

import java.util.Arrays;
import java.util.HashSet;
@@ -28,19 +28,19 @@ import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;

-import org.apache.hadoop.hbase.stargate.model.CellModel;
-import org.apache.hadoop.hbase.stargate.model.CellSetModel;
-import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
-import org.apache.hadoop.hbase.stargate.model.RowModel;
-import org.apache.hadoop.hbase.stargate.model.ScannerModel;
-import org.apache.hadoop.hbase.stargate.model.StorageClusterStatusModel;
-import org.apache.hadoop.hbase.stargate.model.StorageClusterVersionModel;
-import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
-import org.apache.hadoop.hbase.stargate.model.TableListModel;
-import org.apache.hadoop.hbase.stargate.model.TableModel;
-import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
-import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
-import org.apache.hadoop.hbase.stargate.model.VersionModel;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.model.ScannerModel;
+import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
+import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
+import org.apache.hadoop.hbase.rest.model.TableInfoModel;
+import org.apache.hadoop.hbase.rest.model.TableListModel;
+import org.apache.hadoop.hbase.rest.model.TableModel;
+import org.apache.hadoop.hbase.rest.model.TableRegionModel;
+import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
+import org.apache.hadoop.hbase.rest.model.VersionModel;

import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.api.json.JSONJAXBContext;
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate.provider.consumer;
+package org.apache.hadoop.hbase.rest.provider.consumer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -35,8 +35,8 @@ import javax.ws.rs.ext.Provider;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.stargate.Constants;
-import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
+import org.apache.hadoop.hbase.rest.Constants;
+import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;

/**
* Adapter for hooking up Jersey content processing dispatch to
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate.provider.producer;
+package org.apache.hadoop.hbase.rest.provider.producer;

import java.io.IOException;
import java.io.OutputStream;
@@ -34,7 +34,7 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;

-import org.apache.hadoop.hbase.stargate.Constants;
+import org.apache.hadoop.hbase.rest.Constants;

/**
* An adapter between Jersey and Object.toString(). Hooks up plain text output
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate.provider.producer;
+package org.apache.hadoop.hbase.rest.provider.producer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -35,8 +35,8 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;

-import org.apache.hadoop.hbase.stargate.Constants;
-import org.apache.hadoop.hbase.stargate.ProtobufMessageHandler;
+import org.apache.hadoop.hbase.rest.Constants;
+import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;

/**
* An adapter between Jersey and ProtobufMessageHandler implementors. Hooks up
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message Cell {
optional bytes row = 1; // unused if Cell is in a CellSet
@@ -18,7 +18,7 @@

import "CellMessage.proto";

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message CellSet {
message Row {
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message ColumnSchema {
optional string name = 1;
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message Scanner {
optional bytes startRow = 1;
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message StorageClusterStatus {
message Region {
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message TableInfo {
required string name = 1;
@@ -16,7 +16,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message TableList {
repeated string name = 1;
@@ -18,7 +18,7 @@

import "ColumnSchemaMessage.proto";

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message TableSchema {
optional string name = 1;
@@ -16,10 +16,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package org.apache.hadoop.hbase.stargate.protobuf.generated;
+package org.apache.hadoop.hbase.rest.protobuf.generated;

message Version {
-optional string stargateVersion = 1;
+optional string restVersion = 1;
optional string jvmVersion = 2;
optional string osVersion = 3;
optional string serverVersion = 4;
@@ -0,0 +1,74 @@
+package org.apache.hadoop.hbase.rest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.util.StringUtils;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
+public class HBaseRESTClusterTestBase extends HBaseClusterTestCase
+implements Constants {
+
+static final Log LOG =
+LogFactory.getLog(HBaseRESTClusterTestBase.class);
+
+// use a nonstandard port
+static final int DEFAULT_TEST_PORT = 38080;
+
+protected int testServletPort;
+Server server;
+
+protected void setUp() throws Exception {
+super.setUp();
+startServletContainer();
+}
+
+protected void tearDown() throws Exception {
+stopServletContainer();
+super.tearDown();
+}
+
+private void startServletContainer() throws Exception {
+if (server != null) {
+LOG.error("ServletContainer already running");
+return;
+}
+
+// set up the Jersey servlet container for Jetty
+ServletHolder sh = new ServletHolder(ServletContainer.class);
+sh.setInitParameter(
+"com.sun.jersey.config.property.resourceConfigClass",
+ResourceConfig.class.getCanonicalName());
+sh.setInitParameter("com.sun.jersey.config.property.packages",
+"jetty");
+
+LOG.info("configured " + ServletContainer.class.getName());
+
+// set up Jetty and run the embedded server
+testServletPort = conf.getInt("hbase.rest.port", DEFAULT_TEST_PORT);
+server = new Server(testServletPort);
+server.setSendServerVersion(false);
+server.setSendDateHeader(false);
+// set up context
+Context context = new Context(server, "/", Context.SESSIONS);
+context.addServlet(sh, "/*");
+// start the server
+server.start();
+
+LOG.info("started " + server.getClass().getName() + " on port " +
+testServletPort);
+}
+
+private void stopServletContainer() {
+if (server != null) try {
+server.stop();
+server = null;
+} catch (Exception e) {
+LOG.warn(StringUtils.stringifyException(e));
+}
+}
+}
@@ -17,7 +17,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.DataInput;
import java.io.DataOutput;
@@ -56,10 +56,10 @@ import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.RemoteAdmin;
-import org.apache.hadoop.hbase.stargate.client.RemoteHTable;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.RemoteAdmin;
+import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Hash;
import org.apache.hadoop.hbase.util.MurmurHash;
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -36,15 +36,16 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.CellModel;
-import org.apache.hadoop.hbase.stargate.model.CellSetModel;
-import org.apache.hadoop.hbase.stargate.model.RowModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestRowResource extends MiniClusterTestBase {
+public class TestRowResource extends HBaseRESTClusterTestBase {

private static final String TABLE = "TestRowResource";
private static final String COLUMN_1 = "a:";
private static final String COLUMN_2 = "b:";
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -38,16 +38,17 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.CellModel;
-import org.apache.hadoop.hbase.stargate.model.CellSetModel;
-import org.apache.hadoop.hbase.stargate.model.RowModel;
-import org.apache.hadoop.hbase.stargate.model.ScannerModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestScannerResource extends MiniClusterTestBase {
+public class TestScannerResource extends HBaseRESTClusterTestBase {

private static final String TABLE = "TestScannerResource";
private static final String COLUMN_1 = "a:";
private static final String COLUMN_2 = "b:";
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.StringWriter;
@@ -58,16 +58,16 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.CellModel;
-import org.apache.hadoop.hbase.stargate.model.CellSetModel;
-import org.apache.hadoop.hbase.stargate.model.RowModel;
-import org.apache.hadoop.hbase.stargate.model.ScannerModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestScannersWithFilters extends MiniClusterTestBase {
+public class TestScannersWithFilters extends HBaseRESTClusterTestBase {

private static final Log LOG =
LogFactory.getLog(TestScannersWithFilters.class);
@@ -975,5 +975,4 @@ public class TestScannersWithFilters extends MiniClusterTestBase {
};
verifyScanFull(s, kvs);
}
-
}
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -28,15 +28,15 @@ import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
-import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;
-import org.apache.hadoop.hbase.stargate.model.TestTableSchemaModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
+import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
+import org.apache.hadoop.hbase.rest.model.TestTableSchemaModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestSchemaResource extends MiniClusterTestBase {
+public class TestSchemaResource extends HBaseRESTClusterTestBase {
private Client client;
private JAXBContext context;
private HBaseAdmin admin;
@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -26,13 +26,13 @@ import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.StorageClusterStatusModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestStatusResource extends MiniClusterTestBase {
+public class TestStatusResource extends HBaseRESTClusterTestBase {
private static final byte[] ROOT_REGION_NAME = Bytes.toBytes("-ROOT-,,0");
private static final byte[] META_REGION_NAME = Bytes.toBytes(".META.,,1");

@@ -18,7 +18,7 @@
* limitations under the License.
*/

-package org.apache.hadoop.hbase.stargate;
+package org.apache.hadoop.hbase.rest;

import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -32,16 +32,16 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.stargate.client.Client;
-import org.apache.hadoop.hbase.stargate.client.Cluster;
-import org.apache.hadoop.hbase.stargate.client.Response;
-import org.apache.hadoop.hbase.stargate.model.TableModel;
-import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
-import org.apache.hadoop.hbase.stargate.model.TableListModel;
-import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
+import org.apache.hadoop.hbase.rest.client.Client;
+import org.apache.hadoop.hbase.rest.client.Cluster;
+import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.rest.model.TableModel;
+import org.apache.hadoop.hbase.rest.model.TableInfoModel;
+import org.apache.hadoop.hbase.rest.model.TableListModel;
+import org.apache.hadoop.hbase.rest.model.TableRegionModel;
import org.apache.hadoop.hbase.util.Bytes;

-public class TestTableResource extends MiniClusterTestBase {
+public class TestTableResource extends HBaseRESTClusterTestBase {
private static String TABLE = "TestTableResource";
private static String COLUMN = "test:";

@@ -94,7 +94,7 @@ public class TestTableResource extends MiniClusterTestBase {
}

public void testTableListText() throws IOException {
-Response response = client.get("/", MIMETYPE_PLAIN);
+Response response = client.get("/", MIMETYPE_TEXT);
assertEquals(response.getCode(), 200);
}