HBASE-5258 Move coprocessors set out of RegionLoad (Sergey Shelukhin)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1418561 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in: Zhihong Yu, 2012-12-07 23:57:10 +00:00
parent 044f53bf33
commit f8cb35d156
6 changed files with 51 additions and 451 deletions

View File

@ -4040,17 +4040,7 @@ public final class HBaseProtos {
boolean hasTotalStaticBloomSizeKB();
int getTotalStaticBloomSizeKB();
// repeated .Coprocessor coprocessors = 15;
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>
getCoprocessorsList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index);
int getCoprocessorsCount();
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
getCoprocessorsOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder(
int index);
// optional uint64 completeSequenceId = 16;
// optional uint64 completeSequenceId = 15;
boolean hasCompleteSequenceId();
long getCompleteSequenceId();
}
@ -4226,29 +4216,8 @@ public final class HBaseProtos {
return totalStaticBloomSizeKB_;
}
// repeated .Coprocessor coprocessors = 15;
public static final int COPROCESSORS_FIELD_NUMBER = 15;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> coprocessors_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getCoprocessorsList() {
return coprocessors_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
getCoprocessorsOrBuilderList() {
return coprocessors_;
}
public int getCoprocessorsCount() {
return coprocessors_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) {
return coprocessors_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder(
int index) {
return coprocessors_.get(index);
}
// optional uint64 completeSequenceId = 16;
public static final int COMPLETESEQUENCEID_FIELD_NUMBER = 16;
// optional uint64 completeSequenceId = 15;
public static final int COMPLETESEQUENCEID_FIELD_NUMBER = 15;
private long completeSequenceId_;
public boolean hasCompleteSequenceId() {
return ((bitField0_ & 0x00004000) == 0x00004000);
@ -4272,7 +4241,6 @@ public final class HBaseProtos {
rootIndexSizeKB_ = 0;
totalStaticIndexSizeKB_ = 0;
totalStaticBloomSizeKB_ = 0;
coprocessors_ = java.util.Collections.emptyList();
completeSequenceId_ = 0L;
}
private byte memoizedIsInitialized = -1;
@ -4288,12 +4256,6 @@ public final class HBaseProtos {
memoizedIsInitialized = 0;
return false;
}
for (int i = 0; i < getCoprocessorsCount(); i++) {
if (!getCoprocessors(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@ -4343,11 +4305,8 @@ public final class HBaseProtos {
if (((bitField0_ & 0x00002000) == 0x00002000)) {
output.writeUInt32(14, totalStaticBloomSizeKB_);
}
for (int i = 0; i < coprocessors_.size(); i++) {
output.writeMessage(15, coprocessors_.get(i));
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
output.writeUInt64(16, completeSequenceId_);
output.writeUInt64(15, completeSequenceId_);
}
getUnknownFields().writeTo(output);
}
@ -4414,13 +4373,9 @@ public final class HBaseProtos {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(14, totalStaticBloomSizeKB_);
}
for (int i = 0; i < coprocessors_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(15, coprocessors_.get(i));
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(16, completeSequenceId_);
.computeUInt64Size(15, completeSequenceId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
@ -4515,8 +4470,6 @@ public final class HBaseProtos {
result = result && (getTotalStaticBloomSizeKB()
== other.getTotalStaticBloomSizeKB());
}
result = result && getCoprocessorsList()
.equals(other.getCoprocessorsList());
result = result && (hasCompleteSequenceId() == other.hasCompleteSequenceId());
if (hasCompleteSequenceId()) {
result = result && (getCompleteSequenceId()
@ -4587,10 +4540,6 @@ public final class HBaseProtos {
hash = (37 * hash) + TOTALSTATICBLOOMSIZEKB_FIELD_NUMBER;
hash = (53 * hash) + getTotalStaticBloomSizeKB();
}
if (getCoprocessorsCount() > 0) {
hash = (37 * hash) + COPROCESSORS_FIELD_NUMBER;
hash = (53 * hash) + getCoprocessorsList().hashCode();
}
if (hasCompleteSequenceId()) {
hash = (37 * hash) + COMPLETESEQUENCEID_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getCompleteSequenceId());
@ -4704,7 +4653,6 @@ public final class HBaseProtos {
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionSpecifierFieldBuilder();
getCoprocessorsFieldBuilder();
}
}
private static Builder create() {
@ -4745,14 +4693,8 @@ public final class HBaseProtos {
bitField0_ = (bitField0_ & ~0x00001000);
totalStaticBloomSizeKB_ = 0;
bitField0_ = (bitField0_ & ~0x00002000);
if (coprocessorsBuilder_ == null) {
coprocessors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00004000);
} else {
coprocessorsBuilder_.clear();
}
completeSequenceId_ = 0L;
bitField0_ = (bitField0_ & ~0x00008000);
bitField0_ = (bitField0_ & ~0x00004000);
return this;
}
@ -4851,16 +4793,7 @@ public final class HBaseProtos {
to_bitField0_ |= 0x00002000;
}
result.totalStaticBloomSizeKB_ = totalStaticBloomSizeKB_;
if (coprocessorsBuilder_ == null) {
if (((bitField0_ & 0x00004000) == 0x00004000)) {
coprocessors_ = java.util.Collections.unmodifiableList(coprocessors_);
bitField0_ = (bitField0_ & ~0x00004000);
}
result.coprocessors_ = coprocessors_;
} else {
result.coprocessors_ = coprocessorsBuilder_.build();
}
if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
to_bitField0_ |= 0x00004000;
}
result.completeSequenceId_ = completeSequenceId_;
@ -4922,32 +4855,6 @@ public final class HBaseProtos {
if (other.hasTotalStaticBloomSizeKB()) {
setTotalStaticBloomSizeKB(other.getTotalStaticBloomSizeKB());
}
if (coprocessorsBuilder_ == null) {
if (!other.coprocessors_.isEmpty()) {
if (coprocessors_.isEmpty()) {
coprocessors_ = other.coprocessors_;
bitField0_ = (bitField0_ & ~0x00004000);
} else {
ensureCoprocessorsIsMutable();
coprocessors_.addAll(other.coprocessors_);
}
onChanged();
}
} else {
if (!other.coprocessors_.isEmpty()) {
if (coprocessorsBuilder_.isEmpty()) {
coprocessorsBuilder_.dispose();
coprocessorsBuilder_ = null;
coprocessors_ = other.coprocessors_;
bitField0_ = (bitField0_ & ~0x00004000);
coprocessorsBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getCoprocessorsFieldBuilder() : null;
} else {
coprocessorsBuilder_.addAllMessages(other.coprocessors_);
}
}
}
if (other.hasCompleteSequenceId()) {
setCompleteSequenceId(other.getCompleteSequenceId());
}
@ -4964,12 +4871,6 @@ public final class HBaseProtos {
return false;
}
for (int i = 0; i < getCoprocessorsCount(); i++) {
if (!getCoprocessors(i).isInitialized()) {
return false;
}
}
return true;
}
@ -5070,14 +4971,8 @@ public final class HBaseProtos {
totalStaticBloomSizeKB_ = input.readUInt32();
break;
}
case 122: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addCoprocessors(subBuilder.buildPartial());
break;
}
case 128: {
bitField0_ |= 0x00008000;
case 120: {
bitField0_ |= 0x00004000;
completeSequenceId_ = input.readUInt64();
break;
}
@ -5450,208 +5345,22 @@ public final class HBaseProtos {
return this;
}
// repeated .Coprocessor coprocessors = 15;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> coprocessors_ =
java.util.Collections.emptyList();
private void ensureCoprocessorsIsMutable() {
if (!((bitField0_ & 0x00004000) == 0x00004000)) {
coprocessors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>(coprocessors_);
bitField0_ |= 0x00004000;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> coprocessorsBuilder_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getCoprocessorsList() {
if (coprocessorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(coprocessors_);
} else {
return coprocessorsBuilder_.getMessageList();
}
}
public int getCoprocessorsCount() {
if (coprocessorsBuilder_ == null) {
return coprocessors_.size();
} else {
return coprocessorsBuilder_.getCount();
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getCoprocessors(int index) {
if (coprocessorsBuilder_ == null) {
return coprocessors_.get(index);
} else {
return coprocessorsBuilder_.getMessage(index);
}
}
public Builder setCoprocessors(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
if (coprocessorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCoprocessorsIsMutable();
coprocessors_.set(index, value);
onChanged();
} else {
coprocessorsBuilder_.setMessage(index, value);
}
return this;
}
public Builder setCoprocessors(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
if (coprocessorsBuilder_ == null) {
ensureCoprocessorsIsMutable();
coprocessors_.set(index, builderForValue.build());
onChanged();
} else {
coprocessorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
public Builder addCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
if (coprocessorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCoprocessorsIsMutable();
coprocessors_.add(value);
onChanged();
} else {
coprocessorsBuilder_.addMessage(value);
}
return this;
}
public Builder addCoprocessors(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
if (coprocessorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCoprocessorsIsMutable();
coprocessors_.add(index, value);
onChanged();
} else {
coprocessorsBuilder_.addMessage(index, value);
}
return this;
}
public Builder addCoprocessors(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
if (coprocessorsBuilder_ == null) {
ensureCoprocessorsIsMutable();
coprocessors_.add(builderForValue.build());
onChanged();
} else {
coprocessorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
public Builder addCoprocessors(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
if (coprocessorsBuilder_ == null) {
ensureCoprocessorsIsMutable();
coprocessors_.add(index, builderForValue.build());
onChanged();
} else {
coprocessorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllCoprocessors(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> values) {
if (coprocessorsBuilder_ == null) {
ensureCoprocessorsIsMutable();
super.addAll(values, coprocessors_);
onChanged();
} else {
coprocessorsBuilder_.addAllMessages(values);
}
return this;
}
public Builder clearCoprocessors() {
if (coprocessorsBuilder_ == null) {
coprocessors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00004000);
onChanged();
} else {
coprocessorsBuilder_.clear();
}
return this;
}
public Builder removeCoprocessors(int index) {
if (coprocessorsBuilder_ == null) {
ensureCoprocessorsIsMutable();
coprocessors_.remove(index);
onChanged();
} else {
coprocessorsBuilder_.remove(index);
}
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getCoprocessorsBuilder(
int index) {
return getCoprocessorsFieldBuilder().getBuilder(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getCoprocessorsOrBuilder(
int index) {
if (coprocessorsBuilder_ == null) {
return coprocessors_.get(index); } else {
return coprocessorsBuilder_.getMessageOrBuilder(index);
}
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
getCoprocessorsOrBuilderList() {
if (coprocessorsBuilder_ != null) {
return coprocessorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(coprocessors_);
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder() {
return getCoprocessorsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addCoprocessorsBuilder(
int index) {
return getCoprocessorsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
}
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder>
getCoprocessorsBuilderList() {
return getCoprocessorsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
getCoprocessorsFieldBuilder() {
if (coprocessorsBuilder_ == null) {
coprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>(
coprocessors_,
((bitField0_ & 0x00004000) == 0x00004000),
getParentForChildren(),
isClean());
coprocessors_ = null;
}
return coprocessorsBuilder_;
}
// optional uint64 completeSequenceId = 16;
// optional uint64 completeSequenceId = 15;
private long completeSequenceId_ ;
public boolean hasCompleteSequenceId() {
return ((bitField0_ & 0x00008000) == 0x00008000);
return ((bitField0_ & 0x00004000) == 0x00004000);
}
public long getCompleteSequenceId() {
return completeSequenceId_;
}
public Builder setCompleteSequenceId(long value) {
bitField0_ |= 0x00008000;
bitField0_ |= 0x00004000;
completeSequenceId_ = value;
onChanged();
return this;
}
public Builder clearCompleteSequenceId() {
bitField0_ = (bitField0_ & ~0x00008000);
bitField0_ = (bitField0_ & ~0x00004000);
completeSequenceId_ = 0L;
onChanged();
return this;
@ -11223,7 +10932,7 @@ public final class HBaseProtos {
"\0222\n\004type\030\001 \002(\0162$.RegionSpecifier.RegionS" +
"pecifierType\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpe" +
"cifierType\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_R" +
"EGION_NAME\020\002\"\324\003\n\nRegionLoad\022)\n\017regionSpe" +
"EGION_NAME\020\002\"\260\003\n\nRegionLoad\022)\n\017regionSpe" +
"cifier\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006stores" +
"\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(\r\022\037\n\027storeUncom" +
"pressedSizeMB\030\004 \001(\r\022\027\n\017storefileSizeMB\030\005" +
@ -11233,33 +10942,32 @@ public final class HBaseProtos {
"alCompactingKVs\030\n \001(\004\022\033\n\023currentCompacte" +
"dKVs\030\013 \001(\004\022\027\n\017rootIndexSizeKB\030\014 \001(\r\022\036\n\026t" +
"otalStaticIndexSizeKB\030\r \001(\r\022\036\n\026totalStat" +
"icBloomSizeKB\030\016 \001(\r\022\"\n\014coprocessors\030\017 \003(" +
"\0132\014.Coprocessor\022\032\n\022completeSequenceId\030\020 " +
"\001(\004\"\372\001\n\nServerLoad\022\030\n\020numberOfRequests\030\001" +
" \001(\r\022\035\n\025totalNumberOfRequests\030\002 \001(\r\022\022\n\nu" +
"sedHeapMB\030\003 \001(\r\022\021\n\tmaxHeapMB\030\004 \001(\r\022 \n\013re" +
"gionLoads\030\005 \003(\0132\013.RegionLoad\022\"\n\014coproces",
"sors\030\006 \003(\0132\014.Coprocessor\022\027\n\017reportStartT" +
"ime\030\007 \001(\004\022\025\n\rreportEndTime\030\010 \001(\004\022\026\n\016info" +
"ServerPort\030\t \001(\r\"%\n\tTimeRange\022\014\n\004from\030\001 " +
"\001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Filter\022\014\n\004name\030\001 \002(\t\022" +
"\030\n\020serializedFilter\030\002 \001(\014\"w\n\010KeyValue\022\013\n" +
"\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030" +
"\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\031\n\007keyType\030\005 \001(" +
"\0162\010.KeyType\022\r\n\005value\030\006 \001(\014\"?\n\nServerName" +
"\022\020\n\010hostName\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\021\n\tstar" +
"tCode\030\003 \001(\004\"\033\n\013Coprocessor\022\014\n\004name\030\001 \002(\t",
"\"-\n\016NameStringPair\022\014\n\004name\030\001 \002(\t\022\r\n\005valu" +
"e\030\002 \002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002(\t\022\r" +
"\n\005value\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005first" +
"\030\001 \002(\014\022\016\n\006second\030\002 \002(\014*r\n\013CompareType\022\010\n" +
"\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n" +
"\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GR" +
"EATER\020\005\022\t\n\005NO_OP\020\006*_\n\007KeyType\022\013\n\007MINIMUM" +
"\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN" +
"\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*o" +
"rg.apache.hadoop.hbase.protobuf.generate",
"dB\013HBaseProtosH\001\240\001\001"
"icBloomSizeKB\030\016 \001(\r\022\032\n\022completeSequenceI" +
"d\030\017 \001(\004\"\372\001\n\nServerLoad\022\030\n\020numberOfReques" +
"ts\030\001 \001(\r\022\035\n\025totalNumberOfRequests\030\002 \001(\r\022" +
"\022\n\nusedHeapMB\030\003 \001(\r\022\021\n\tmaxHeapMB\030\004 \001(\r\022 " +
"\n\013regionLoads\030\005 \003(\0132\013.RegionLoad\022\"\n\014copr" +
"ocessors\030\006 \003(\0132\014.Coprocessor\022\027\n\017reportSt",
"artTime\030\007 \001(\004\022\025\n\rreportEndTime\030\010 \001(\004\022\026\n\016" +
"infoServerPort\030\t \001(\r\"%\n\tTimeRange\022\014\n\004fro" +
"m\030\001 \001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Filter\022\014\n\004name\030\001 " +
"\002(\t\022\030\n\020serializedFilter\030\002 \001(\014\"w\n\010KeyValu" +
"e\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualif" +
"ier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\031\n\007keyType\030" +
"\005 \001(\0162\010.KeyType\022\r\n\005value\030\006 \001(\014\"?\n\nServer" +
"Name\022\020\n\010hostName\030\001 \002(\t\022\014\n\004port\030\002 \001(\r\022\021\n\t" +
"startCode\030\003 \001(\004\"\033\n\013Coprocessor\022\014\n\004name\030\001" +
" \002(\t\"-\n\016NameStringPair\022\014\n\004name\030\001 \002(\t\022\r\n\005",
"value\030\002 \002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002" +
"(\t\022\r\n\005value\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005f" +
"irst\030\001 \002(\014\022\016\n\006second\030\002 \002(\014*r\n\013CompareTyp" +
"e\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020" +
"\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013" +
"\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*_\n\007KeyType\022\013\n\007MIN" +
"IMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_CO" +
"LUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B" +
">\n*org.apache.hadoop.hbase.protobuf.gene" +
"ratedB\013HBaseProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@ -11319,7 +11027,7 @@ public final class HBaseProtos {
internal_static_RegionLoad_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RegionLoad_descriptor,
new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "Coprocessors", "CompleteSequenceId", },
new java.lang.String[] { "RegionSpecifier", "Stores", "Storefiles", "StoreUncompressedSizeMB", "StorefileSizeMB", "MemstoreSizeMB", "StorefileIndexSizeMB", "ReadRequestsCount", "WriteRequestsCount", "TotalCompactingKVs", "CurrentCompactedKVs", "RootIndexSizeKB", "TotalStaticIndexSizeKB", "TotalStaticBloomSizeKB", "CompleteSequenceId", },
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.class,
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad.Builder.class);
internal_static_ServerLoad_descriptor =

View File

@ -130,11 +130,8 @@ message RegionLoad {
*/
optional uint32 totalStaticBloomSizeKB = 14;
/** Region-level coprocessors. */
repeated Coprocessor coprocessors = 15;
/** the most recent sequence Id from cache flush */
optional uint64 completeSequenceId = 16;
optional uint64 completeSequenceId = 15;
}
/* Server-level protobufs */

View File

@ -38,16 +38,6 @@ public class RegionLoad {
this.regionLoadPB = regionLoadPB;
}
// Getters
private String[] getCoprocessors() {
int numCoprocessors = regionLoadPB.getCoprocessorsCount();
String [] ret = new String[numCoprocessors];
for (int i = 0; i < numCoprocessors; ++i) {
ret[i] = regionLoadPB.getCoprocessors(i).getName();
}
return ret;
}
/**
* @return the region name
*/

View File

@ -229,30 +229,16 @@ public class ServerLoad {
* @return string array of loaded RegionServer-level and
* Region-level coprocessors
*/
public String[] getAllCoprocessors() {
public String[] getRsCoprocessors() {
// Need a set to remove duplicates, but since generated Coprocessor class
// is not Comparable, make it a Set<String> instead of Set<Coprocessor>
TreeSet<String> coprocessSet = new TreeSet<String>();
for (Coprocessor coprocessor : obtainServerLoadPB().getCoprocessorsList()) {
coprocessSet.add(coprocessor.getName());
}
for (HBaseProtos.RegionLoad rl : obtainServerLoadPB().getRegionLoadsList()) {
for (Coprocessor coprocessor : rl.getCoprocessorsList()) {
coprocessSet.add(coprocessor.getName());
}
}
return coprocessSet.toArray(new String[0]);
}
/**
* @deprecated Use getAllCoprocessors instead
*/
public String[] getCoprocessors() {
return getAllCoprocessors();
}
/**
* @return number of requests per second received since the last report
*/
@ -307,7 +293,7 @@ public class ServerLoad {
}
sb = Strings.appendKeyValue(sb, "compactionProgressPct", compactionProgressPct);
String[] coprocessorStrings = getAllCoprocessors();
String[] coprocessorStrings = getRsCoprocessors();
if (coprocessorStrings != null) {
sb = Strings.appendKeyValue(sb, "coprocessors", Arrays.toString(coprocessorStrings));
}

View File

@ -1293,11 +1293,6 @@ public class HRegionServer implements ClientProtocol,
.setTotalCompactingKVs(totalCompactingKVs)
.setCurrentCompactedKVs(currentCompactedKVs)
.setCompleteSequenceId(r.completeSequenceId);
Set<String> coprocessors = r.getCoprocessorHost().getCoprocessors();
for (String coprocessor : coprocessors) {
regionLoad.addCoprocessors(
Coprocessor.newBuilder().setName(coprocessor).build());
}
return regionLoad.build();
}

View File

@ -69,14 +69,6 @@ public class TestClassLoading {
private static final String[] regionServerSystemCoprocessors =
new String[]{
regionCoprocessor1.getSimpleName(),
regionServerCoprocessor.getSimpleName()
};
private static final String[] regionServerSystemAndUserCoprocessors =
new String[] {
regionCoprocessor1.getSimpleName(),
regionCoprocessor2.getSimpleName(),
regionServerCoprocessor.getSimpleName()
};
@ -431,6 +423,8 @@ public class TestClassLoading {
File outerJarFile = new File(TEST_UTIL.getDataTestDir().toString(), "outer.jar");
byte buffer[] = new byte[BUFFER_SIZE];
// TODO: code here and elsewhere in this file is duplicated w/TestClassFinder.
// Some refactoring may be in order...
// Open archive file
FileOutputStream stream = new FileOutputStream(outerJarFile);
JarOutputStream out = new JarOutputStream(stream, new Manifest());
@ -440,7 +434,7 @@ public class TestClassLoading {
JarEntry jarAdd = new JarEntry("/lib/" + jarFile.getName());
jarAdd.setTime(jarFile.lastModified());
out.putNextEntry(jarAdd);
// Write file to archive
FileInputStream in = new FileInputStream(jarFile);
while (true) {
@ -514,82 +508,12 @@ public class TestClassLoading {
@Test
public void testRegionServerCoprocessorsReported() throws Exception {
// HBASE 4070: Improve region server metrics to report loaded coprocessors
// to master: verify that each regionserver is reporting the correct set of
// loaded coprocessors.
// We rely on the fact that getCoprocessors() will return a sorted
// display of the coprocessors' names, so for example, regionCoprocessor1's
// name "ColumnAggregationEndpoint" will appear before regionCoprocessor2's
// name "GenericEndpoint" because "C" is before "G" lexicographically.
// This was a test for HBASE-4070.
// We are removing coprocessors from region load in HBASE-5258.
// Therefore, this test now only checks system coprocessors.
HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
// disable all user tables, if any are loaded.
for (HTableDescriptor htd: admin.listTables()) {
if (!htd.isMetaTable()) {
String tableName = htd.getNameAsString();
if (admin.isTableEnabled(tableName)) {
try {
admin.disableTable(htd.getNameAsString());
} catch (TableNotEnabledException e) {
// Ignoring this exception for now; not sure why it's happening.
}
}
}
}
// should only be system coprocessors loaded at this point.
assertAllRegionServers(regionServerSystemCoprocessors,null);
// The next two tests enable and disable user tables to see if coprocessor
// load reporting changes as coprocessors are loaded and unloaded.
//
// Create a table.
// should cause regionCoprocessor2 to be loaded, since we've specified it
// for loading on any user table with USER_REGION_COPROCESSOR_CONF_KEY
// in setUpBeforeClass().
String userTable1 = "userTable1";
HTableDescriptor userTD1 = new HTableDescriptor(userTable1);
admin.createTable(userTD1);
waitForTable(userTD1.getName());
// table should be enabled now.
assertTrue(admin.isTableEnabled(userTable1));
assertAllRegionServers(regionServerSystemAndUserCoprocessors, userTable1);
// unload and make sure we're back to only system coprocessors again.
admin.disableTable(userTable1);
assertAllRegionServers(regionServerSystemCoprocessors,null);
// create another table, with its own specified coprocessor.
String userTable2 = "userTable2";
HTableDescriptor htd2 = new HTableDescriptor(userTable2);
String userTableCP = "userTableCP";
File jarFile1 = buildCoprocessorJar(userTableCP);
htd2.addFamily(new HColumnDescriptor("myfamily"));
htd2.setValue("COPROCESSOR$1", jarFile1.toString() + "|" + userTableCP +
"|" + Coprocessor.PRIORITY_USER);
admin.createTable(htd2);
waitForTable(htd2.getName());
// table should be enabled now.
assertTrue(admin.isTableEnabled(userTable2));
ArrayList<String> existingCPsPlusNew =
new ArrayList<String>(Arrays.asList(regionServerSystemAndUserCoprocessors));
existingCPsPlusNew.add(userTableCP);
String[] existingCPsPlusNewArray = new String[existingCPsPlusNew.size()];
assertAllRegionServers(existingCPsPlusNew.toArray(existingCPsPlusNewArray),
userTable2);
admin.disableTable(userTable2);
assertTrue(admin.isTableDisabled(userTable2));
// we should be back to only system coprocessors again.
assertAllRegionServers(regionServerSystemCoprocessors, null);
}
/**
@ -636,7 +560,7 @@ public class TestClassLoading {
}
boolean any_failed = false;
for(Map.Entry<ServerName,ServerLoad> server: servers.entrySet()) {
actualCoprocessors = server.getValue().getAllCoprocessors();
actualCoprocessors = server.getValue().getRsCoprocessors();
if (!Arrays.equals(actualCoprocessors, expectedCoprocessors)) {
LOG.debug("failed comparison: actual: " +
Arrays.toString(actualCoprocessors) +