Add admin endpoint for updating the configuration on the regionserver

Summary: This is a follow-up diff for the JIRA, adding the functionality to update configs in an online fashion (no regionserver restart required).

Test Plan: Unit test. This change adds an endpoint on the regionserver; the test calls the endpoint and verifies that the call succeeds.
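
For reference, a minimal usage sketch of the new client API (hypothetical caller code, not part of this diff; the hostname, port, and startcode passed to ServerName.valueOf are illustrative):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class UpdateConfigurationExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // HBaseAdmin implements the Admin interface extended by this diff.
    Admin admin = new HBaseAdmin(conf);
    try {
      // Ask one regionserver to reload its configuration from disk.
      ServerName server = ServerName.valueOf("rs1.example.com", 16020, 1413250000000L);
      admin.updateConfiguration(server);
      // Or fan the reload out to every regionserver in the cluster status.
      admin.updateConfiguration();
    } finally {
      admin.close();
    }
  }
}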

Differential Revision: https://reviews.facebook.net/D24861

Signed-off-by: stack <stack@apache.org>
Authored by manukranthk on 2014-10-13 18:40:43 -07:00; committed by stack
parent 7e995b6496
commit 5c92bded9d
9 changed files with 924 additions and 29 deletions

hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java

@@ -1278,4 +1278,21 @@ public interface Admin extends Abortable, Closeable {
* @return A RegionServerCoprocessorRpcChannel instance
*/
CoprocessorRpcChannel coprocessorService(ServerName sn);
/**
* Update the configuration and trigger an online config change
* on the regionserver
* @param server The server whose configuration needs to be updated.
* @throws IOException
*/
void updateConfiguration(ServerName server) throws IOException;
/**
* Update the configuration and trigger an online config change
* on all the regionservers
* @throws IOException
*/
void updateConfiguration() throws IOException;
}

hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java

@@ -84,6 +84,7 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRespo
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
@@ -3729,5 +3730,21 @@ public class HBaseAdmin implements Admin {
public CoprocessorRpcChannel coprocessorService(ServerName sn) {
return new RegionServerCoprocessorRpcChannel(connection, sn);
}
@Override
public void updateConfiguration(ServerName server) throws IOException {
try {
this.connection.getAdmin(server).updateConfiguration(null,
UpdateConfigurationRequest.getDefaultInstance());
} catch (ServiceException e) {
throw ProtobufUtil.getRemoteException(e);
}
}
@Override
public void updateConfiguration() throws IOException {
for (ServerName server : this.getClusterStatus().getServers()) {
updateConfiguration(server);
}
}
}

hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java

@@ -19879,6 +19879,682 @@ public final class AdminProtos {
// @@protoc_insertion_point(class_scope:GetServerInfoResponse)
}
public interface UpdateConfigurationRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code UpdateConfigurationRequest}
*/
public static final class UpdateConfigurationRequest extends
com.google.protobuf.GeneratedMessage
implements UpdateConfigurationRequestOrBuilder {
// Use UpdateConfigurationRequest.newBuilder() to construct.
private UpdateConfigurationRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private UpdateConfigurationRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final UpdateConfigurationRequest defaultInstance;
public static UpdateConfigurationRequest getDefaultInstance() {
return defaultInstance;
}
public UpdateConfigurationRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private UpdateConfigurationRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.Builder.class);
}
public static com.google.protobuf.Parser<UpdateConfigurationRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateConfigurationRequest>() {
public UpdateConfigurationRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UpdateConfigurationRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<UpdateConfigurationRequest> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code UpdateConfigurationRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest build() {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:UpdateConfigurationRequest)
}
static {
defaultInstance = new UpdateConfigurationRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:UpdateConfigurationRequest)
}
public interface UpdateConfigurationResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code UpdateConfigurationResponse}
*/
public static final class UpdateConfigurationResponse extends
com.google.protobuf.GeneratedMessage
implements UpdateConfigurationResponseOrBuilder {
// Use UpdateConfigurationResponse.newBuilder() to construct.
private UpdateConfigurationResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private UpdateConfigurationResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final UpdateConfigurationResponse defaultInstance;
public static UpdateConfigurationResponse getDefaultInstance() {
return defaultInstance;
}
public UpdateConfigurationResponse getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private UpdateConfigurationResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.Builder.class);
}
public static com.google.protobuf.Parser<UpdateConfigurationResponse> PARSER =
new com.google.protobuf.AbstractParser<UpdateConfigurationResponse>() {
public UpdateConfigurationResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UpdateConfigurationResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<UpdateConfigurationResponse> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code UpdateConfigurationResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UpdateConfigurationResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse build() {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:UpdateConfigurationResponse)
}
static {
defaultInstance = new UpdateConfigurationResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:UpdateConfigurationResponse)
}
/**
* Protobuf service {@code AdminService}
*/
@@ -20007,6 +20683,14 @@ public final class AdminProtos {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done);
/**
* <code>rpc UpdateConfiguration(.UpdateConfigurationRequest) returns (.UpdateConfigurationResponse);</code>
*/
public abstract void updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done);
}
public static com.google.protobuf.Service newReflectiveService(
@@ -20132,6 +20816,14 @@ public final class AdminProtos {
impl.updateFavoredNodes(controller, request, done);
}
@java.lang.Override
public void updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done) {
impl.updateConfiguration(controller, request, done);
}
};
}
@@ -20184,6 +20876,8 @@ public final class AdminProtos {
return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request);
case 14:
return impl.updateFavoredNodes(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest)request);
case 15:
return impl.updateConfiguration(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20228,6 +20922,8 @@ public final class AdminProtos {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
case 14:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance();
case 15:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20272,6 +20968,8 @@ public final class AdminProtos {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
case 14:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance();
case 15:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20400,6 +21098,14 @@ public final class AdminProtos {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse> done);
/**
* <code>rpc UpdateConfiguration(.UpdateConfigurationRequest) returns (.UpdateConfigurationResponse);</code>
*/
public abstract void updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -20497,6 +21203,11 @@ public final class AdminProtos {
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse>specializeCallback(
done));
return;
case 15:
this.updateConfiguration(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse>specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20541,6 +21252,8 @@ public final class AdminProtos {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance();
case 14:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.getDefaultInstance();
case 15:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20585,6 +21298,8 @@ public final class AdminProtos {
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance();
case 14:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance();
case 15:
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -20830,6 +21545,21 @@ public final class AdminProtos {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.class,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance()));
}
public void updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(15),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.class,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
@@ -20912,6 +21642,11 @@ public final class AdminProtos {
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -21100,6 +21835,18 @@ public final class AdminProtos {
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse updateConfiguration(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(15),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:AdminService)
@@ -21265,6 +22012,16 @@ public final class AdminProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetServerInfoResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_UpdateConfigurationRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_UpdateConfigurationRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_UpdateConfigurationResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_UpdateConfigurationResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -21333,33 +22090,37 @@ public final class AdminProtos {
"e\"\026\n\024GetServerInfoRequest\"B\n\nServerInfo\022" +
" \n\013server_name\030\001 \002(\0132\013.ServerName\022\022\n\nweb" +
"ui_port\030\002 \001(\r\"9\n\025GetServerInfoResponse\022 " +
"\n\013server_info\030\001 \002(\0132\013.ServerInfo2\306\007\n\014Adm",
"inService\022>\n\rGetRegionInfo\022\025.GetRegionIn" +
"foRequest\032\026.GetRegionInfoResponse\022;\n\014Get" +
"StoreFile\022\024.GetStoreFileRequest\032\025.GetSto" +
"reFileResponse\022D\n\017GetOnlineRegion\022\027.GetO" +
"nlineRegionRequest\032\030.GetOnlineRegionResp" +
"onse\0225\n\nOpenRegion\022\022.OpenRegionRequest\032\023" +
".OpenRegionResponse\0228\n\013CloseRegion\022\023.Clo" +
"seRegionRequest\032\024.CloseRegionResponse\0228\n" +
"\013FlushRegion\022\023.FlushRegionRequest\032\024.Flus" +
"hRegionResponse\0228\n\013SplitRegion\022\023.SplitRe",
"gionRequest\032\024.SplitRegionResponse\022>\n\rCom" +
"pactRegion\022\025.CompactRegionRequest\032\026.Comp" +
"actRegionResponse\022;\n\014MergeRegions\022\024.Merg" +
"eRegionsRequest\032\025.MergeRegionsResponse\022J" +
"\n\021ReplicateWALEntry\022\031.ReplicateWALEntryR" +
"equest\032\032.ReplicateWALEntryResponse\022?\n\006Re" +
"play\022\031.ReplicateWALEntryRequest\032\032.Replic" +
"ateWALEntryResponse\022>\n\rRollWALWriter\022\025.R" +
"ollWALWriterRequest\032\026.RollWALWriterRespo" +
"nse\022>\n\rGetServerInfo\022\025.GetServerInfoRequ",
"est\032\026.GetServerInfoResponse\0225\n\nStopServe" +
"r\022\022.StopServerRequest\032\023.StopServerRespon" +
"se\022M\n\022UpdateFavoredNodes\022\032.UpdateFavored" +
"NodesRequest\032\033.UpdateFavoredNodesRespons" +
"eBA\n*org.apache.hadoop.hbase.protobuf.ge" +
"neratedB\013AdminProtosH\001\210\001\001\240\001\001"
"\n\013server_info\030\001 \002(\0132\013.ServerInfo\"\034\n\032Upda",
"teConfigurationRequest\"\035\n\033UpdateConfigur" +
"ationResponse2\230\010\n\014AdminService\022>\n\rGetReg" +
"ionInfo\022\025.GetRegionInfoRequest\032\026.GetRegi" +
"onInfoResponse\022;\n\014GetStoreFile\022\024.GetStor" +
"eFileRequest\032\025.GetStoreFileResponse\022D\n\017G" +
"etOnlineRegion\022\027.GetOnlineRegionRequest\032" +
"\030.GetOnlineRegionResponse\0225\n\nOpenRegion\022" +
"\022.OpenRegionRequest\032\023.OpenRegionResponse" +
"\0228\n\013CloseRegion\022\023.CloseRegionRequest\032\024.C" +
"loseRegionResponse\0228\n\013FlushRegion\022\023.Flus",
"hRegionRequest\032\024.FlushRegionResponse\0228\n\013" +
"SplitRegion\022\023.SplitRegionRequest\032\024.Split" +
"RegionResponse\022>\n\rCompactRegion\022\025.Compac" +
"tRegionRequest\032\026.CompactRegionResponse\022;" +
"\n\014MergeRegions\022\024.MergeRegionsRequest\032\025.M" +
"ergeRegionsResponse\022J\n\021ReplicateWALEntry" +
"\022\031.ReplicateWALEntryRequest\032\032.ReplicateW" +
"ALEntryResponse\022?\n\006Replay\022\031.ReplicateWAL" +
"EntryRequest\032\032.ReplicateWALEntryResponse" +
"\022>\n\rRollWALWriter\022\025.RollWALWriterRequest",
"\032\026.RollWALWriterResponse\022>\n\rGetServerInf" +
"o\022\025.GetServerInfoRequest\032\026.GetServerInfo" +
"Response\0225\n\nStopServer\022\022.StopServerReque" +
"st\032\023.StopServerResponse\022M\n\022UpdateFavored" +
"Nodes\022\032.UpdateFavoredNodesRequest\032\033.Upda" +
"teFavoredNodesResponse\022P\n\023UpdateConfigur" +
"ation\022\033.UpdateConfigurationRequest\032\034.Upd" +
"ateConfigurationResponseBA\n*org.apache.h" +
"adoop.hbase.protobuf.generatedB\013AdminPro" +
"tosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -21558,6 +22319,18 @@ public final class AdminProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetServerInfoResponse_descriptor,
new java.lang.String[] { "ServerInfo", });
internal_static_UpdateConfigurationRequest_descriptor =
getDescriptor().getMessageTypes().get(30);
internal_static_UpdateConfigurationRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_UpdateConfigurationRequest_descriptor,
new java.lang.String[] { });
internal_static_UpdateConfigurationResponse_descriptor =
getDescriptor().getMessageTypes().get(31);
internal_static_UpdateConfigurationResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_UpdateConfigurationResponse_descriptor,
new java.lang.String[] { });
return null;
}
};

hbase-protocol/src/main/protobuf/Admin.proto

@@ -228,6 +228,12 @@ message GetServerInfoResponse {
required ServerInfo server_info = 1;
}
message UpdateConfigurationRequest {
}
message UpdateConfigurationResponse {
}
service AdminService {
rpc GetRegionInfo(GetRegionInfoRequest)
returns(GetRegionInfoResponse);
@@ -273,4 +279,7 @@ service AdminService {
rpc UpdateFavoredNodes(UpdateFavoredNodesRequest)
returns(UpdateFavoredNodesResponse);
rpc UpdateConfiguration(UpdateConfigurationRequest)
returns(UpdateConfigurationResponse);
}

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java

@@ -77,7 +77,6 @@ import org.apache.hadoop.hbase.client.ConnectionUtils;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;
import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -3112,6 +3111,15 @@ public class HRegionServer extends HasThread implements
*/
protected ConfigurationManager getConfigurationManager() {
return configurationManager;
}
/**
* Reload the configuration from disk.
*/
public void updateConfiguration() {
LOG.info("Reloading the configuration from disk.");
// Reload the configuration from disk.
conf.reloadConfiguration();
configurationManager.notifyAllObservers(conf);
}
}
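
The notifyAllObservers() call above fans the freshly reloaded Configuration out to components registered with the ConfigurationManager. As a hedged sketch of the observer side (assuming the org.apache.hadoop.hbase.conf.ConfigurationObserver interface and its onConfigurationChange callback; the property name below is illustrative, not from this diff), a component that wants to react to an online change might look like:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;

// Hypothetical observer; an instance would be registered with the
// regionserver's ConfigurationManager so that updateConfiguration()
// triggers the callback below on each reload.
public class CacheSizeObserver implements ConfigurationObserver {
  private volatile long cacheSize = 1024L;

  @Override
  public void onConfigurationChange(Configuration conf) {
    // "my.component.cache.size" is an illustrative property name.
    this.cacheSize = conf.getLong("my.component.cache.size", 1024L);
  }

  public long getCacheSize() {
    return cacheSize;
  }
}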

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java

@@ -112,6 +112,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
@@ -2196,4 +2198,16 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
CoprocessorServiceRequest request) throws ServiceException {
return regionServer.execRegionServerService(controller, request);
}
@Override
public UpdateConfigurationResponse updateConfiguration(
RpcController controller, UpdateConfigurationRequest request)
throws ServiceException {
try {
this.regionServer.updateConfiguration();
} catch (Exception e) {
throw new ServiceException(e);
}
return UpdateConfigurationResponse.getDefaultInstance();
}
}

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java

@@ -0,0 +1,49 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({MediumTests.class})
public class TestUpdateConfiguration {
private static final Log LOG = LogFactory.getLog(TestUpdateConfiguration.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@BeforeClass
public static void setup() throws Exception {
TEST_UTIL.startMiniCluster();
}
@Test
public void testOnlineConfigChange() throws IOException {
LOG.debug("Starting the test");
Admin admin = TEST_UTIL.getHBaseAdmin();
ServerName server = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
admin.updateConfiguration(server);
}
}

hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java

@@ -73,6 +73,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -605,4 +607,11 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
// TODO Auto-generated method stub
return null;
}
@Override
public UpdateConfigurationResponse updateConfiguration(
RpcController controller, UpdateConfigurationRequest request)
throws ServiceException {
return null;
}
}

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java

@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;