HBASE-4299 Update to Avro 1.5.3 and use Avro Maven plugin to generate Avro classes.
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1164762 13f79535-47bb-0310-9956-ffa450edef68
parent 8efece3ba5
commit d0e61e30f1
CHANGES.txt
@@ -17,6 +17,8 @@ Release 0.91.0 - Unreleased
    HBASE-4197  RegionServer expects all scanner to be subclasses of
                HRegion.RegionScanner (Lars Hofhansl)
    HBASE-4233  Update protobuf dependency to 2.4.0a (todd)
+   HBASE-4299  Update to Avro 1.5.3 and use Avro Maven plugin to generate
+               Avro classes. (Alejandro Abdelnur)
 
   BUG FIXES
    HBASE-3280  YouAreDeadException being swallowed in HRS getMaster
pom.xml | 55
@@ -270,6 +270,16 @@
         <artifactId>maven-surefire-report-plugin</artifactId>
         <version>2.9</version>
       </plugin>
+      <plugin>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <version>${avro.version}</version>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.5</version>
+      </plugin>
     </plugins>
   </pluginManagement>
 
@@ -289,6 +299,23 @@
     </resources>
 
     <plugins>
+      <plugin>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-avro-sources</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>schema</goal>
+              <goal>protocol</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>xml-maven-plugin</artifactId>
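The two goals bound above, schema and protocol, run Avro's specific compiler over the .avsc and .avpr files in the plugin's source directory (src/main/avro by default) during generate-sources, writing Java classes into the configured outputDirectory. As a minimal sketch, the equivalent programmatic invocation looks roughly like the following, assuming Avro 1.5.x's avro-compiler artifact is on the classpath; the input file names here are hypothetical:

```java
import java.io.File;
import java.io.IOException;
import org.apache.avro.compiler.specific.SpecificCompiler;

// A sketch of what the avro-maven-plugin's "schema" and "protocol" goals
// do at generate-sources time (input file names are hypothetical).
public class GenerateAvroSources {
  public static void main(String[] args) throws IOException {
    File out = new File("target/generated-sources/java");
    // "schema" goal: one SpecificRecord subclass per named type in the schema.
    SpecificCompiler.compileSchema(new File("src/main/avro/hbase.avsc"), out);
    // "protocol" goal: an interface for the protocol's messages plus classes
    // for the records, enums, and errors the protocol declares.
    SpecificCompiler.compileProtocol(new File("src/main/avro/hbase.avpr"), out);
  }
}
```

Wiring the output directory into the compile source roots is then the job of the build-helper-maven-plugin execution changed further down in this diff.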
@@ -475,18 +502,18 @@
 
                 <mkdir dir="${build.webapps}/master/WEB-INF"/>
                 <jspcompiler uriroot="${src.webapps}/master"
-                             outputdir="${generated.sources}"
+                             outputdir="${generated.sources}/java"
                              package="org.apache.hadoop.hbase.generated.master"
                              webxml="${build.webapps}/master/WEB-INF/web.xml"/>
 
                 <mkdir dir="${build.webapps}/regionserver/WEB-INF"/>
                 <jspcompiler uriroot="${src.webapps}/regionserver"
-                             outputdir="${generated.sources}"
+                             outputdir="${generated.sources}/java"
                              package="org.apache.hadoop.hbase.generated.regionserver"
                              webxml="${build.webapps}/regionserver/WEB-INF/web.xml"/>
 
                 <exec executable="sh">
-                  <arg line="${basedir}/src/saveVersion.sh ${project.version} ${generated.sources}"/>
+                  <arg line="${basedir}/src/saveVersion.sh ${project.version} ${generated.sources}/java"/>
                 </exec>
               </tasks>
             </configuration>
@@ -552,7 +579,7 @@
             <version>1.5</version>
             <executions>
               <execution>
-                <id>add-jspc-source</id>
+                <id>jspcSource-packageInfo-Avro-source</id>
                 <phase>generate-sources</phase>
                 <goals>
                   <goal>add-source</goal>
@@ -560,18 +587,7 @@
                 <configuration>
                   <sources>
                     <source>${basedir}/target/jspc</source>
-                  </sources>
-                </configuration>
-              </execution>
-              <execution>
-                <id>add-package-info</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>add-source</goal>
-                </goals>
-                <configuration>
-                  <sources>
-                    <source>${project.build.directory}/generated-sources</source>
+                    <source>${project.build.directory}/generated-sources/java</source>
                   </sources>
                 </configuration>
               </execution>
@@ -625,7 +641,7 @@
     <compileSource>1.6</compileSource>
 
     <!-- Dependencies -->
-    <avro.version>1.4.1</avro.version>
+    <avro.version>1.5.3</avro.version>
     <commons-cli.version>1.2</commons-cli.version>
     <commons-codec.version>1.4</commons-codec.version>
     <commons-httpclient.version>3.1</commons-httpclient.version><!-- pretty outdated -->
@@ -731,6 +747,11 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <version>${avro.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
AvroServer.java
@@ -26,7 +26,7 @@ import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericArray;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.ipc.HttpServer;
-import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.specific.SpecificResponder;
 import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
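The only source-level change the 1.5.x upgrade forces on the gateway server is the new home of SpecificResponder, which moved from org.apache.avro.specific to org.apache.avro.ipc.specific; HttpServer stays where it was. A minimal sketch of how the two are wired together under the new package layout, assuming an implementation of the generated HBase protocol interface is at hand (the serve helper below is hypothetical, not HBase's actual method):

```java
import org.apache.avro.ipc.HttpServer;
import org.apache.avro.ipc.specific.SpecificResponder; // new package in Avro 1.5.x
import org.apache.hadoop.hbase.avro.generated.HBase;

public class AvroGatewaySketch {
  // Hypothetical helper: expose an HBase protocol implementation over HTTP.
  static void serve(HBase impl, int port) throws Exception {
    // SpecificResponder dispatches incoming protocol messages to impl;
    // HttpServer is Avro's HTTP transport for any Responder.
    HttpServer server = new HttpServer(new SpecificResponder(HBase.class, impl), port);
    server.start();
    server.join();
  }
}
```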
AAlreadyExists.java (deleted)
@@ -1,21 +0,0 @@
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AAlreadyExists extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"AAlreadyExists\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public org.apache.avro.util.Utf8 message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (org.apache.avro.util.Utf8)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
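This file and the generated classes deleted below are checked-in Avro codegen output; from this commit on, the avro-maven-plugin regenerates them into target/generated-sources/java on every build, so the copies under version control go away. Two 1.5.x codegen differences are worth noting: string-typed fields come out as java.lang.CharSequence rather than org.apache.avro.util.Utf8, and binary encoders are obtained from EncoderFactory instead of being constructed directly. A small sketch of serializing one of these SpecificRecords under Avro 1.5.3, assuming the regenerated AAlreadyExists with a public CharSequence field:

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.hadoop.hbase.avro.generated.AAlreadyExists;

public class RecordEncodeSketch {
  public static byte[] encode(String message) throws IOException {
    AAlreadyExists err = new AAlreadyExists();
    err.message = message; // CharSequence field in 1.5.x codegen (Utf8 in 1.4)
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Avro 1.5 obtains encoders from EncoderFactory rather than
    // "new BinaryEncoder(out)" as in 1.4.
    BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
    new SpecificDatumWriter<AAlreadyExists>(AAlreadyExists.class).write(err, enc);
    enc.flush();
    return out.toByteArray();
  }
}
```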
AClusterStatus.java (deleted)
@@ -1,49 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AClusterStatus extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AClusterStatus\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"averageLoad\",\"type\":\"double\"},{\"name\":\"deadServerNames\",\"type\":{\"type\":\"array\",\"items\":\"string\"}},{\"name\":\"deadServers\",\"type\":\"int\"},{\"name\":\"hbaseVersion\",\"type\":\"string\"},{\"name\":\"regionsCount\",\"type\":\"int\"},{\"name\":\"requestsCount\",\"type\":\"int\"},{\"name\":\"serverInfos\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AServerInfo\",\"fields\":[{\"name\":\"infoPort\",\"type\":\"int\"},{\"name\":\"load\",\"type\":{\"type\":\"record\",\"name\":\"AServerLoad\",\"fields\":[{\"name\":\"load\",\"type\":\"int\"},{\"name\":\"maxHeapMB\",\"type\":\"int\"},{\"name\":\"memStoreSizeInMB\",\"type\":\"int\"},{\"name\":\"numberOfRegions\",\"type\":\"int\"},{\"name\":\"numberOfRequests\",\"type\":\"int\"},{\"name\":\"regionsLoad\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"ARegionLoad\",\"fields\":[{\"name\":\"memStoreSizeMB\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"storefileIndexSizeMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeMB\",\"type\":\"int\"},{\"name\":\"stores\",\"type\":\"int\"}]}}},{\"name\":\"storefileIndexSizeInMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeInMB\",\"type\":\"int\"},{\"name\":\"usedHeapMB\",\"type\":\"int\"}]}},{\"name\":\"serverAddress\",\"type\":{\"type\":\"record\",\"name\":\"AServerAddress\",\"fields\":[{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"inetSocketAddress\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"}]}},{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"startCode\",\"type\":\"long\"}]}}},{\"name\":\"servers\",\"type\":\"int\"}]}");
-  public double averageLoad;
-  public java.util.List<java.lang.CharSequence> deadServerNames;
-  public int deadServers;
-  public java.lang.CharSequence hbaseVersion;
-  public int regionsCount;
-  public int requestsCount;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AServerInfo> serverInfos;
-  public int servers;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return averageLoad;
-    case 1: return deadServerNames;
-    case 2: return deadServers;
-    case 3: return hbaseVersion;
-    case 4: return regionsCount;
-    case 5: return requestsCount;
-    case 6: return serverInfos;
-    case 7: return servers;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: averageLoad = (java.lang.Double)value$; break;
-    case 1: deadServerNames = (java.util.List<java.lang.CharSequence>)value$; break;
-    case 2: deadServers = (java.lang.Integer)value$; break;
-    case 3: hbaseVersion = (java.lang.CharSequence)value$; break;
-    case 4: regionsCount = (java.lang.Integer)value$; break;
-    case 5: requestsCount = (java.lang.Integer)value$; break;
-    case 6: serverInfos = (java.util.List<org.apache.hadoop.hbase.avro.generated.AServerInfo>)value$; break;
-    case 7: servers = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AColumn.java (deleted)
@@ -1,31 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AColumn extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AColumn\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":[\"bytes\",\"null\"]}]}");
-  public java.nio.ByteBuffer family;
-  public java.nio.ByteBuffer qualifier;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return family;
-    case 1: return qualifier;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: family = (java.nio.ByteBuffer)value$; break;
-    case 1: qualifier = (java.nio.ByteBuffer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AColumnFamilyDescriptor.java (deleted)
@@ -1,42 +0,0 @@
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AColumnFamilyDescriptor extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AColumnFamilyDescriptor\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"compression\",\"type\":{\"type\":\"enum\",\"name\":\"ACompressionAlgorithm\",\"symbols\":[\"LZO\",\"GZ\",\"NONE\"]}},{\"name\":\"maxVersions\",\"type\":\"int\"},{\"name\":\"blocksize\",\"type\":\"int\"},{\"name\":\"inMemory\",\"type\":\"boolean\"},{\"name\":\"timeToLive\",\"type\":\"int\"},{\"name\":\"blockCacheEnabled\",\"type\":\"boolean\"},{\"name\":\"bloomfilterEnabled\",\"type\":\"boolean\"}]}");
-  public java.nio.ByteBuffer name;
-  public org.apache.hadoop.hbase.avro.generated.ACompressionAlgorithm compression;
-  public int maxVersions;
-  public int blocksize;
-  public boolean inMemory;
-  public int timeToLive;
-  public boolean blockCacheEnabled;
-  public boolean bloomfilterEnabled;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return name;
-    case 1: return compression;
-    case 2: return maxVersions;
-    case 3: return blocksize;
-    case 4: return inMemory;
-    case 5: return timeToLive;
-    case 6: return blockCacheEnabled;
-    case 7: return bloomfilterEnabled;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: name = (java.nio.ByteBuffer)value$; break;
-    case 1: compression = (org.apache.hadoop.hbase.avro.generated.ACompressionAlgorithm)value$; break;
-    case 2: maxVersions = (java.lang.Integer)value$; break;
-    case 3: blocksize = (java.lang.Integer)value$; break;
-    case 4: inMemory = (java.lang.Boolean)value$; break;
-    case 5: timeToLive = (java.lang.Integer)value$; break;
-    case 6: blockCacheEnabled = (java.lang.Boolean)value$; break;
-    case 7: bloomfilterEnabled = (java.lang.Boolean)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AColumnValue.java (deleted)
@@ -1,37 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AColumnValue extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AColumnValue\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":\"bytes\"},{\"name\":\"value\",\"type\":\"bytes\"},{\"name\":\"timestamp\",\"type\":[\"long\",\"null\"]}]}");
-  public java.nio.ByteBuffer family;
-  public java.nio.ByteBuffer qualifier;
-  public java.nio.ByteBuffer value;
-  public java.lang.Long timestamp;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return family;
-    case 1: return qualifier;
-    case 2: return value;
-    case 3: return timestamp;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: family = (java.nio.ByteBuffer)value$; break;
-    case 1: qualifier = (java.nio.ByteBuffer)value$; break;
-    case 2: value = (java.nio.ByteBuffer)value$; break;
-    case 3: timestamp = (java.lang.Long)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
ACompressionAlgorithm.java (deleted)
@@ -1,11 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public enum ACompressionAlgorithm {
-  LZO, GZ, NONE
-}
ADelete.java (deleted)
@@ -1,31 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class ADelete extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"ADelete\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"row\",\"type\":\"bytes\"},{\"name\":\"columns\",\"type\":[{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AColumn\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":[\"bytes\",\"null\"]}]}},\"null\"]}]}");
-  public java.nio.ByteBuffer row;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn> columns;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return row;
-    case 1: return columns;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: row = (java.nio.ByteBuffer)value$; break;
-    case 1: columns = (java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn>)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AFamilyDescriptor.java (deleted)
@@ -1,46 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AFamilyDescriptor extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AFamilyDescriptor\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"compression\",\"type\":[{\"type\":\"enum\",\"name\":\"ACompressionAlgorithm\",\"symbols\":[\"LZO\",\"GZ\",\"NONE\"]},\"null\"]},{\"name\":\"maxVersions\",\"type\":[\"int\",\"null\"]},{\"name\":\"blocksize\",\"type\":[\"int\",\"null\"]},{\"name\":\"inMemory\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"timeToLive\",\"type\":[\"int\",\"null\"]},{\"name\":\"blockCacheEnabled\",\"type\":[\"boolean\",\"null\"]}]}");
-  public java.nio.ByteBuffer name;
-  public org.apache.hadoop.hbase.avro.generated.ACompressionAlgorithm compression;
-  public java.lang.Integer maxVersions;
-  public java.lang.Integer blocksize;
-  public java.lang.Boolean inMemory;
-  public java.lang.Integer timeToLive;
-  public java.lang.Boolean blockCacheEnabled;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return name;
-    case 1: return compression;
-    case 2: return maxVersions;
-    case 3: return blocksize;
-    case 4: return inMemory;
-    case 5: return timeToLive;
-    case 6: return blockCacheEnabled;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: name = (java.nio.ByteBuffer)value$; break;
-    case 1: compression = (org.apache.hadoop.hbase.avro.generated.ACompressionAlgorithm)value$; break;
-    case 2: maxVersions = (java.lang.Integer)value$; break;
-    case 3: blocksize = (java.lang.Integer)value$; break;
-    case 4: inMemory = (java.lang.Boolean)value$; break;
-    case 5: timeToLive = (java.lang.Integer)value$; break;
-    case 6: blockCacheEnabled = (java.lang.Boolean)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AGet.java (deleted)
@@ -1,40 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AGet extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AGet\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"row\",\"type\":\"bytes\"},{\"name\":\"columns\",\"type\":[{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AColumn\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":[\"bytes\",\"null\"]}]}},\"null\"]},{\"name\":\"timestamp\",\"type\":[\"long\",\"null\"]},{\"name\":\"timerange\",\"type\":[{\"type\":\"record\",\"name\":\"ATimeRange\",\"fields\":[{\"name\":\"minStamp\",\"type\":\"long\"},{\"name\":\"maxStamp\",\"type\":\"long\"}]},\"null\"]},{\"name\":\"maxVersions\",\"type\":[\"int\",\"null\"]}]}");
-  public java.nio.ByteBuffer row;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn> columns;
-  public java.lang.Long timestamp;
-  public org.apache.hadoop.hbase.avro.generated.ATimeRange timerange;
-  public java.lang.Integer maxVersions;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return row;
-    case 1: return columns;
-    case 2: return timestamp;
-    case 3: return timerange;
-    case 4: return maxVersions;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: row = (java.nio.ByteBuffer)value$; break;
-    case 1: columns = (java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn>)value$; break;
-    case 2: timestamp = (java.lang.Long)value$; break;
-    case 3: timerange = (org.apache.hadoop.hbase.avro.generated.ATimeRange)value$; break;
-    case 4: maxVersions = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AIOError.java (deleted)
@@ -1,28 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AIOError extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"AIOError\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public java.lang.CharSequence message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (java.lang.CharSequence)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AIllegalArgument.java (deleted)
@@ -1,28 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AIllegalArgument extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"AIllegalArgument\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public java.lang.CharSequence message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (java.lang.CharSequence)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AMasterNotRunning.java (deleted)
@@ -1,28 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AMasterNotRunning extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"AMasterNotRunning\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public java.lang.CharSequence message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (java.lang.CharSequence)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
APut.java (deleted)
@@ -1,31 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class APut extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"APut\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"row\",\"type\":\"bytes\"},{\"name\":\"columnValues\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AColumnValue\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":\"bytes\"},{\"name\":\"value\",\"type\":\"bytes\"},{\"name\":\"timestamp\",\"type\":[\"long\",\"null\"]}]}}}]}");
-  public java.nio.ByteBuffer row;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AColumnValue> columnValues;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return row;
-    case 1: return columnValues;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: row = (java.nio.ByteBuffer)value$; break;
-    case 1: columnValues = (java.util.List<org.apache.hadoop.hbase.avro.generated.AColumnValue>)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
ARegionLoad.java (deleted)
@@ -1,43 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class ARegionLoad extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"ARegionLoad\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"memStoreSizeMB\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"storefileIndexSizeMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeMB\",\"type\":\"int\"},{\"name\":\"stores\",\"type\":\"int\"}]}");
-  public int memStoreSizeMB;
-  public java.nio.ByteBuffer name;
-  public int storefileIndexSizeMB;
-  public int storefiles;
-  public int storefileSizeMB;
-  public int stores;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return memStoreSizeMB;
-    case 1: return name;
-    case 2: return storefileIndexSizeMB;
-    case 3: return storefiles;
-    case 4: return storefileSizeMB;
-    case 5: return stores;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: memStoreSizeMB = (java.lang.Integer)value$; break;
-    case 1: name = (java.nio.ByteBuffer)value$; break;
-    case 2: storefileIndexSizeMB = (java.lang.Integer)value$; break;
-    case 3: storefiles = (java.lang.Integer)value$; break;
-    case 4: storefileSizeMB = (java.lang.Integer)value$; break;
-    case 5: stores = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AResult.java (deleted)
@@ -1,31 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AResult extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AResult\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"row\",\"type\":\"bytes\"},{\"name\":\"entries\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AResultEntry\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":\"bytes\"},{\"name\":\"value\",\"type\":\"bytes\"},{\"name\":\"timestamp\",\"type\":\"long\"}]}}}]}");
-  public java.nio.ByteBuffer row;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AResultEntry> entries;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return row;
-    case 1: return entries;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: row = (java.nio.ByteBuffer)value$; break;
-    case 1: entries = (java.util.List<org.apache.hadoop.hbase.avro.generated.AResultEntry>)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AResultEntry.java (deleted)
@@ -1,37 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AResultEntry extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AResultEntry\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":\"bytes\"},{\"name\":\"value\",\"type\":\"bytes\"},{\"name\":\"timestamp\",\"type\":\"long\"}]}");
-  public java.nio.ByteBuffer family;
-  public java.nio.ByteBuffer qualifier;
-  public java.nio.ByteBuffer value;
-  public long timestamp;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return family;
-    case 1: return qualifier;
-    case 2: return value;
-    case 3: return timestamp;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: family = (java.nio.ByteBuffer)value$; break;
-    case 1: qualifier = (java.nio.ByteBuffer)value$; break;
-    case 2: value = (java.nio.ByteBuffer)value$; break;
-    case 3: timestamp = (java.lang.Long)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AScan.java (deleted)
@@ -1,43 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AScan extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AScan\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"startRow\",\"type\":[\"bytes\",\"null\"]},{\"name\":\"stopRow\",\"type\":[\"bytes\",\"null\"]},{\"name\":\"columns\",\"type\":[{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AColumn\",\"fields\":[{\"name\":\"family\",\"type\":\"bytes\"},{\"name\":\"qualifier\",\"type\":[\"bytes\",\"null\"]}]}},\"null\"]},{\"name\":\"timestamp\",\"type\":[\"long\",\"null\"]},{\"name\":\"timerange\",\"type\":[{\"type\":\"record\",\"name\":\"ATimeRange\",\"fields\":[{\"name\":\"minStamp\",\"type\":\"long\"},{\"name\":\"maxStamp\",\"type\":\"long\"}]},\"null\"]},{\"name\":\"maxVersions\",\"type\":[\"int\",\"null\"]}]}");
-  public java.nio.ByteBuffer startRow;
-  public java.nio.ByteBuffer stopRow;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn> columns;
-  public java.lang.Long timestamp;
-  public org.apache.hadoop.hbase.avro.generated.ATimeRange timerange;
-  public java.lang.Integer maxVersions;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return startRow;
-    case 1: return stopRow;
-    case 2: return columns;
-    case 3: return timestamp;
-    case 4: return timerange;
-    case 5: return maxVersions;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: startRow = (java.nio.ByteBuffer)value$; break;
-    case 1: stopRow = (java.nio.ByteBuffer)value$; break;
-    case 2: columns = (java.util.List<org.apache.hadoop.hbase.avro.generated.AColumn>)value$; break;
-    case 3: timestamp = (java.lang.Long)value$; break;
-    case 4: timerange = (org.apache.hadoop.hbase.avro.generated.ATimeRange)value$; break;
-    case 5: maxVersions = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AServerAddress.java (deleted)
@@ -1,34 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AServerAddress extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AServerAddress\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"inetSocketAddress\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"}]}");
-  public java.lang.CharSequence hostname;
-  public java.lang.CharSequence inetSocketAddress;
-  public int port;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return hostname;
-    case 1: return inetSocketAddress;
-    case 2: return port;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: hostname = (java.lang.CharSequence)value$; break;
-    case 1: inetSocketAddress = (java.lang.CharSequence)value$; break;
-    case 2: port = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AServerInfo.java (deleted)
@@ -1,40 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AServerInfo extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AServerInfo\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"infoPort\",\"type\":\"int\"},{\"name\":\"load\",\"type\":{\"type\":\"record\",\"name\":\"AServerLoad\",\"fields\":[{\"name\":\"load\",\"type\":\"int\"},{\"name\":\"maxHeapMB\",\"type\":\"int\"},{\"name\":\"memStoreSizeInMB\",\"type\":\"int\"},{\"name\":\"numberOfRegions\",\"type\":\"int\"},{\"name\":\"numberOfRequests\",\"type\":\"int\"},{\"name\":\"regionsLoad\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"ARegionLoad\",\"fields\":[{\"name\":\"memStoreSizeMB\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"storefileIndexSizeMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeMB\",\"type\":\"int\"},{\"name\":\"stores\",\"type\":\"int\"}]}}},{\"name\":\"storefileIndexSizeInMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeInMB\",\"type\":\"int\"},{\"name\":\"usedHeapMB\",\"type\":\"int\"}]}},{\"name\":\"serverAddress\",\"type\":{\"type\":\"record\",\"name\":\"AServerAddress\",\"fields\":[{\"name\":\"hostname\",\"type\":\"string\"},{\"name\":\"inetSocketAddress\",\"type\":\"string\"},{\"name\":\"port\",\"type\":\"int\"}]}},{\"name\":\"serverName\",\"type\":\"string\"},{\"name\":\"startCode\",\"type\":\"long\"}]}");
-  public int infoPort;
-  public org.apache.hadoop.hbase.avro.generated.AServerLoad load;
-  public org.apache.hadoop.hbase.avro.generated.AServerAddress serverAddress;
-  public java.lang.CharSequence serverName;
-  public long startCode;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return infoPort;
-    case 1: return load;
-    case 2: return serverAddress;
-    case 3: return serverName;
-    case 4: return startCode;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: infoPort = (java.lang.Integer)value$; break;
-    case 1: load = (org.apache.hadoop.hbase.avro.generated.AServerLoad)value$; break;
-    case 2: serverAddress = (org.apache.hadoop.hbase.avro.generated.AServerAddress)value$; break;
-    case 3: serverName = (java.lang.CharSequence)value$; break;
-    case 4: startCode = (java.lang.Long)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
AServerLoad.java (deleted)
@@ -1,55 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class AServerLoad extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AServerLoad\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"load\",\"type\":\"int\"},{\"name\":\"maxHeapMB\",\"type\":\"int\"},{\"name\":\"memStoreSizeInMB\",\"type\":\"int\"},{\"name\":\"numberOfRegions\",\"type\":\"int\"},{\"name\":\"numberOfRequests\",\"type\":\"int\"},{\"name\":\"regionsLoad\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"ARegionLoad\",\"fields\":[{\"name\":\"memStoreSizeMB\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"storefileIndexSizeMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeMB\",\"type\":\"int\"},{\"name\":\"stores\",\"type\":\"int\"}]}}},{\"name\":\"storefileIndexSizeInMB\",\"type\":\"int\"},{\"name\":\"storefiles\",\"type\":\"int\"},{\"name\":\"storefileSizeInMB\",\"type\":\"int\"},{\"name\":\"usedHeapMB\",\"type\":\"int\"}]}");
-  public int load;
-  public int maxHeapMB;
-  public int memStoreSizeInMB;
-  public int numberOfRegions;
-  public int numberOfRequests;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.ARegionLoad> regionsLoad;
-  public int storefileIndexSizeInMB;
-  public int storefiles;
-  public int storefileSizeInMB;
-  public int usedHeapMB;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return load;
-    case 1: return maxHeapMB;
-    case 2: return memStoreSizeInMB;
-    case 3: return numberOfRegions;
-    case 4: return numberOfRequests;
-    case 5: return regionsLoad;
-    case 6: return storefileIndexSizeInMB;
-    case 7: return storefiles;
-    case 8: return storefileSizeInMB;
-    case 9: return usedHeapMB;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: load = (java.lang.Integer)value$; break;
-    case 1: maxHeapMB = (java.lang.Integer)value$; break;
-    case 2: memStoreSizeInMB = (java.lang.Integer)value$; break;
-    case 3: numberOfRegions = (java.lang.Integer)value$; break;
-    case 4: numberOfRequests = (java.lang.Integer)value$; break;
-    case 5: regionsLoad = (java.util.List<org.apache.hadoop.hbase.avro.generated.ARegionLoad>)value$; break;
-    case 6: storefileIndexSizeInMB = (java.lang.Integer)value$; break;
-    case 7: storefiles = (java.lang.Integer)value$; break;
-    case 8: storefileSizeInMB = (java.lang.Integer)value$; break;
-    case 9: usedHeapMB = (java.lang.Integer)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
ATableDescriptor.java (deleted)
@@ -1,52 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class ATableDescriptor extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"ATableDescriptor\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"families\",\"type\":[{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AFamilyDescriptor\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"compression\",\"type\":[{\"type\":\"enum\",\"name\":\"ACompressionAlgorithm\",\"symbols\":[\"LZO\",\"GZ\",\"NONE\"]},\"null\"]},{\"name\":\"maxVersions\",\"type\":[\"int\",\"null\"]},{\"name\":\"blocksize\",\"type\":[\"int\",\"null\"]},{\"name\":\"inMemory\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"timeToLive\",\"type\":[\"int\",\"null\"]},{\"name\":\"blockCacheEnabled\",\"type\":[\"boolean\",\"null\"]}]}},\"null\"]},{\"name\":\"maxFileSize\",\"type\":[\"long\",\"null\"]},{\"name\":\"memStoreFlushSize\",\"type\":[\"long\",\"null\"]},{\"name\":\"rootRegion\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"metaRegion\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"metaTable\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"readOnly\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"deferredLogFlush\",\"type\":[\"boolean\",\"null\"]}]}");
-  public java.nio.ByteBuffer name;
-  public java.util.List<org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor> families;
-  public java.lang.Long maxFileSize;
-  public java.lang.Long memStoreFlushSize;
-  public java.lang.Boolean rootRegion;
-  public java.lang.Boolean metaRegion;
-  public java.lang.Boolean metaTable;
-  public java.lang.Boolean readOnly;
-  public java.lang.Boolean deferredLogFlush;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return name;
-    case 1: return families;
-    case 2: return maxFileSize;
-    case 3: return memStoreFlushSize;
-    case 4: return rootRegion;
-    case 5: return metaRegion;
-    case 6: return metaTable;
-    case 7: return readOnly;
-    case 8: return deferredLogFlush;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: name = (java.nio.ByteBuffer)value$; break;
-    case 1: families = (java.util.List<org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor>)value$; break;
-    case 2: maxFileSize = (java.lang.Long)value$; break;
-    case 3: memStoreFlushSize = (java.lang.Long)value$; break;
-    case 4: rootRegion = (java.lang.Boolean)value$; break;
-    case 5: metaRegion = (java.lang.Boolean)value$; break;
-    case 6: metaTable = (java.lang.Boolean)value$; break;
-    case 7: readOnly = (java.lang.Boolean)value$; break;
-    case 8: deferredLogFlush = (java.lang.Boolean)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
ATableExists.java (deleted)
@@ -1,28 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class ATableExists extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"ATableExists\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public java.lang.CharSequence message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (java.lang.CharSequence)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
ATimeRange.java (deleted)
@@ -1,31 +0,0 @@
-/**
- * Autogenerated by Avro
- *
- * DO NOT EDIT DIRECTLY
- */
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class ATimeRange extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"ATimeRange\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"minStamp\",\"type\":\"long\"},{\"name\":\"maxStamp\",\"type\":\"long\"}]}");
-  public long minStamp;
-  public long maxStamp;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter. Applications should not call.
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return minStamp;
-    case 1: return maxStamp;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  // Used by DatumReader. Applications should not call.
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: minStamp = (java.lang.Long)value$; break;
-    case 1: maxStamp = (java.lang.Long)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
File diff suppressed because one or more lines are too long
IOError.java (deleted)
@@ -1,21 +0,0 @@
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class IOError extends org.apache.avro.specific.SpecificExceptionBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"error\",\"name\":\"IOError\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");
-  public org.apache.avro.util.Utf8 message;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return message;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: message = (org.apache.avro.util.Utf8)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}
TCell.java (deleted)
@@ -1,24 +0,0 @@
-package org.apache.hadoop.hbase.avro.generated;
-
-@SuppressWarnings("all")
-public class TCell extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
-  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"TCell\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"value\",\"type\":\"bytes\"},{\"name\":\"timestamp\",\"type\":\"long\"}]}");
-  public java.nio.ByteBuffer value;
-  public long timestamp;
-  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  public java.lang.Object get(int field$) {
-    switch (field$) {
-    case 0: return value;
-    case 1: return timestamp;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-  @SuppressWarnings(value="unchecked")
-  public void put(int field$, java.lang.Object value$) {
-    switch (field$) {
-    case 0: value = (java.nio.ByteBuffer)value$; break;
-    case 1: timestamp = (java.lang.Long)value$; break;
-    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
-    }
-  }
-}