HBASE-18431 Mitigate compatibility concerns between branch-1.3 and branch-1.4

- Refactor TableName into its own proto module and fix up users
- Move SnapshotDescription from Snapshot.proto back to HBase.proto
- Restore FastLongHistogram and TestFastLongHistogram; deprecate FastLongHistogram
- Move DeleteQueryTracker back to o.a.h.h.regionserver
parent 6255dc7001
commit 3feb87b005
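The hunks below repeatedly swap HBaseProtos.TableName for TableProtos.TableName, generated from the new Table.proto module (added to the protoc include list in the pom.xml hunk further down). A hedged reconstruction of that proto file, pieced together from the generated TableProtos code at the end of this commit, is sketched here; the qualifier field number is an assumption, the rest is read off the generated code:

// Table.proto -- a reconstruction sketch, not copied from the commit
package hbase.pb;  // package name read from the descriptor strings ("\022\010hbase.pb")

option java_package = "org.apache.hadoop.hbase.protobuf.generated";  // matches the generated package
option java_outer_classname = "TableProtos";                         // matches the generated class

message TableName {
  required bytes namespace = 1;  // field shown in the generated TableNameOrBuilder below
  required bytes qualifier = 2;  // getQualifier() appears in ProtobufUtil; field number assumed
}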
@@ -43,9 +43,8 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaRetriever;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -95,6 +95,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
@@ -156,7 +157,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaRetriever;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -3174,7 +3175,7 @@ public class HBaseAdmin implements Admin {
       public TableName[] call(int callTimeout) throws Exception {
         HBaseRpcController controller = rpcControllerFactory.newController();
         controller.setCallTimeout(callTimeout);
-        List<HBaseProtos.TableName> tableNames =
+        List<TableProtos.TableName> tableNames =
           master.listTableNamesByNamespace(controller, ListTableNamesByNamespaceRequest.
             newBuilder().setNamespaceName(name).build())
               .getTableNameList();
@@ -145,6 +145,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
 import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
+import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -2975,18 +2976,18 @@ public final class ProtobufUtil {
         ", type=" + proto.getMutateType().toString();
   }
 
-  public static TableName toTableName(HBaseProtos.TableName tableNamePB) {
+  public static TableName toTableName(TableProtos.TableName tableNamePB) {
     return TableName.valueOf(tableNamePB.getNamespace().asReadOnlyByteBuffer(),
         tableNamePB.getQualifier().asReadOnlyByteBuffer());
   }
 
-  public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
-    return HBaseProtos.TableName.newBuilder()
+  public static TableProtos.TableName toProtoTableName(TableName tableName) {
+    return TableProtos.TableName.newBuilder()
        .setNamespace(ByteStringer.wrap(tableName.getNamespace()))
        .setQualifier(ByteStringer.wrap(tableName.getQualifier())).build();
   }
 
-  public static TableName[] getTableNameArray(List<HBaseProtos.TableName> tableNamesList) {
+  public static TableName[] getTableNameArray(List<TableProtos.TableName> tableNamesList) {
     if (tableNamesList == null) {
       return new TableName[0];
     }
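After this change, client code converts between the client-side TableName and its protobuf form through TableProtos rather than HBaseProtos. A minimal round-trip sketch, assuming the ProtobufUtil and TableName classes shown in this diff are on the classpath (the namespace and table names are illustrative):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class TableNameRoundTrip {
  public static void main(String[] args) {
    TableName name = TableName.valueOf("ns", "events"); // example values
    // Java object -> protobuf message (now typed against TableProtos, not HBaseProtos)
    TableProtos.TableName proto = ProtobufUtil.toProtoTableName(name);
    // protobuf message -> Java object
    TableName back = ProtobufUtil.toTableName(proto);
    System.out.println(back.getNameAsString()); // prints "ns:events"
  }
}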
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 
 /**
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.snapshot;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
  * General exception base class for when a snapshot fails
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
  * Thrown when a snapshot could not be restored due to a server-side error when restoring it.
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
  * Thrown when a snapshot could not be created due to a server-side error when
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 
 /**
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
  * Thrown when a snapshot exists but should not
@@ -32,11 +32,11 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -0,0 +1,319 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;

/**
 * FastLongHistogram is a thread-safe class that estimates the distribution of data and computes
 * the quantiles.
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class FastLongHistogram {

  /**
   * Default number of bins.
   */
  public static final int DEFAULT_NBINS = 255;

  public static final double[] DEFAULT_QUANTILES =
      new double[]{0.25, 0.5, 0.75, 0.90, 0.95, 0.98, 0.99, 0.999};

  /**
   * Bins is a class containing a list of buckets (or bins) for estimating a histogram of some
   * data.
   */
  private static class Bins {
    private final Counter[] counts;
    // inclusive
    private final long binsMin;
    // exclusive
    private final long binsMax;
    private final long bins10XMax;
    private final AtomicLong min = new AtomicLong(Long.MAX_VALUE);
    private final AtomicLong max = new AtomicLong(0L);

    private final Counter count = new Counter(0);
    private final Counter total = new Counter(0);

    // set to true when any of data has been inserted to the Bins. It is set after the counts are
    // updated.
    private final AtomicBoolean hasData = new AtomicBoolean(false);

    /**
     * The constructor for creating a Bins without any prior data.
     */
    public Bins(int numBins) {
      counts = createCounters(numBins + 3);
      this.binsMin = 1L;

      // These two numbers are total guesses
      // and should be treated as highly suspect.
      this.binsMax = 1000;
      this.bins10XMax = binsMax * 10;
    }

    /**
     * The constructor for creating a Bins with last Bins.
     */
    public Bins(Bins last, int numOfBins, double minQ, double maxQ) {
      long[] values = last.getQuantiles(new double[] { minQ, maxQ });
      long wd = values[1] - values[0] + 1;
      // expand minQ and maxQ in two ends back assuming uniform distribution
      this.binsMin = Math.max(0L, (long) (values[0] - wd * minQ));
      long binsMax = (long) (values[1] + wd * (1 - maxQ)) + 1;
      // make sure each of bins is at least of width 1
      this.binsMax = Math.max(binsMax, this.binsMin + numOfBins);
      this.bins10XMax = Math.max((long) (values[1] + (binsMax - 1) * 9), this.binsMax + 1);

      this.counts = createCounters(numOfBins + 3);
    }

    private Counter[] createCounters(int num) {
      Counter[] counters = new Counter[num];
      for (int i = 0; i < num; i++) {
        counters[i] = new Counter();
      }
      return counters;
    }

    private int getIndex(long value) {
      if (value < this.binsMin) {
        return 0;
      } else if (value > this.bins10XMax) {
        return this.counts.length - 1;
      } else if (value >= this.binsMax) {
        return this.counts.length - 2;
      }
      // compute the position
      return 1 + (int) ((value - this.binsMin) * (this.counts.length - 3) /
          (this.binsMax - this.binsMin));
    }

    /**
     * Adds a value to the histogram.
     */
    public void add(long value, long count) {
      if (value < 0) {
        // The whole computation is completely thrown off if there are negative numbers
        //
        // Normally we would throw an IllegalArgumentException however this is the metrics
        // system and it should be completely safe at all times.
        // So silently throw it away.
        return;
      }
      AtomicUtils.updateMin(min, value);
      AtomicUtils.updateMax(max, value);

      this.count.add(count);
      this.total.add(value * count);

      int pos = getIndex(value);
      this.counts[pos].add(count);

      // hasData needs to be updated as last
      this.hasData.set(true);
    }

    /**
     * Computes the quantiles given the ratios.
     */
    public long[] getQuantiles(double[] quantiles) {
      if (!this.hasData.get()) {
        // No data yet.
        return new long[quantiles.length];
      }

      // Make a snapshot of lowerCounter, higherCounter and bins.counts to counts.
      // This is not synchronized, but since the counter are accumulating, the result is a good
      // estimation of a snapshot.
      long[] counts = new long[this.counts.length];
      long total = 0L;
      for (int i = 0; i < this.counts.length; i++) {
        counts[i] = this.counts[i].get();
        total += counts[i];
      }

      int rIndex = 0;
      double qCount = total * quantiles[0];
      long cum = 0L;

      long[] res = new long[quantiles.length];
      countsLoop: for (int i = 0; i < counts.length; i++) {
        // mn and mx define a value range
        long mn, mx;
        if (i == 0) {
          mn = this.min.get();
          mx = this.binsMin;
        } else if (i == counts.length - 1) {
          mn = this.bins10XMax;
          mx = this.max.get();
        } else if (i == counts.length - 2) {
          mn = this.binsMax;
          mx = this.bins10XMax;
        } else {
          mn = this.binsMin + (i - 1) * (this.binsMax - this.binsMin) / (this.counts.length - 3);
          mx = this.binsMin + i * (this.binsMax - this.binsMin) / (this.counts.length - 3);
        }

        if (mx < this.min.get()) {
          continue;
        }
        if (mn > this.max.get()) {
          break;
        }
        mn = Math.max(mn, this.min.get());
        mx = Math.min(mx, this.max.get());

        // lastCum/cum are the corresponding counts to mn/mx
        double lastCum = cum;
        cum += counts[i];

        // fill the results for qCount is within current range.
        while (qCount <= cum) {
          if (cum == lastCum) {
            res[rIndex] = mn;
          } else {
            res[rIndex] = (long) ((qCount - lastCum) * (mx - mn) / (cum - lastCum) + mn);
          }

          // move to next quantile
          rIndex++;
          if (rIndex >= quantiles.length) {
            break countsLoop;
          }
          qCount = total * quantiles[rIndex];
        }
      }
      // In case quantiles contains values >= 100%
      for (; rIndex < quantiles.length; rIndex++) {
        res[rIndex] = this.max.get();
      }

      return res;
    }


    long getNumAtOrBelow(long val) {
      final int targetIndex = getIndex(val);
      long totalToCurrentIndex = 0;
      for (int i = 0; i <= targetIndex; i++) {
        totalToCurrentIndex += this.counts[i].get();
      }
      return totalToCurrentIndex;
    }
  }

  // The bins counting values. It is replaced with a new one in calling of reset().
  private volatile Bins bins;

  /**
   * Constructor.
   */
  public FastLongHistogram() {
    this(DEFAULT_NBINS);
  }

  /**
   * Constructor.
   * @param numOfBins the number of bins for the histogram. A larger value results in more precise
   *          results but with lower efficiency, and vice versa.
   */
  public FastLongHistogram(int numOfBins) {
    this.bins = new Bins(numOfBins);
  }

  /**
   * Constructor setting the bins assuming a uniform distribution within a range.
   * @param numOfBins the number of bins for the histogram. A larger value results in more precise
   *          results but with lower efficiency, and vice versa.
   * @param min lower bound of the region, inclusive.
   * @param max higher bound of the region, inclusive.
   */
  public FastLongHistogram(int numOfBins, long min, long max) {
    this(numOfBins);
    Bins bins = new Bins(numOfBins);
    bins.add(min, 1);
    bins.add(max, 1);
    this.bins = new Bins(bins, numOfBins, 0.01, 0.999);
  }

  private FastLongHistogram(Bins bins) {
    this.bins = bins;
  }

  /**
   * Adds a value to the histogram.
   */
  public void add(long value, long count) {
    this.bins.add(value, count);
  }

  /**
   * Computes the quantiles given the ratios.
   */
  public long[] getQuantiles(double[] quantiles) {
    return this.bins.getQuantiles(quantiles);
  }

  public long[] getQuantiles() {
    return this.bins.getQuantiles(DEFAULT_QUANTILES);
  }

  public long getMin() {
    long min = this.bins.min.get();
    return min == Long.MAX_VALUE ? 0 : min; // in case it is not initialized
  }

  public long getMax() {
    return this.bins.max.get();
  }

  public long getCount() {
    return this.bins.count.get();
  }

  public long getMean() {
    Bins bins = this.bins;
    long count = bins.count.get();
    long total = bins.total.get();
    if (count == 0) {
      return 0;
    }
    return total / count;
  }

  public long getNumAtOrBelow(long value) {
    return this.bins.getNumAtOrBelow(value);
  }

  /**
   * Resets the histogram for new counting.
   */
  public FastLongHistogram reset() {
    Bins oldBins = this.bins;
    this.bins = new Bins(this.bins, this.bins.counts.length - 3, 0.01, 0.99);
    return new FastLongHistogram(oldBins);
  }
}
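The restored class is used as a drop-in metrics histogram. A minimal usage sketch, based only on the API shown above (the value range and quantiles are illustrative):

import org.apache.hadoop.hbase.util.FastLongHistogram;

public class HistogramExample {
  public static void main(String[] args) {
    // 100 bins, initially tuned for values uniformly spread over [0, 1000]
    FastLongHistogram hist = new FastLongHistogram(100, 0, 1000);
    for (long v = 0; v <= 1000; v++) {
      hist.add(v, 1); // record each value once
    }
    // Estimate the median and the 99th percentile
    long[] q = hist.getQuantiles(new double[] { 0.5, 0.99 });
    System.out.println("p50=" + q[0] + " p99=" + q[1]);
    // reset() swaps in fresh bins sized from the old distribution and
    // returns a snapshot histogram holding the old counts
    FastLongHistogram snapshot = hist.reset();
    System.out.println("snapshot count=" + snapshot.getCount());
  }
}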
@@ -0,0 +1,132 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.util.Arrays;
import java.util.Random;


import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.assertEquals;

/**
 * Testcases for FastLongHistogram.
 */
@Category(SmallTests.class)
public class TestFastLongHistogram {

  private static void doTestUniform(FastLongHistogram hist) {
    long[] VALUES = { 0, 10, 20, 30, 40, 50 };
    double[] qs = new double[VALUES.length];
    for (int i = 0; i < qs.length; i++) {
      qs[i] = (double) VALUES[i] / VALUES[VALUES.length - 1];
    }

    for (int i = 0; i < 10; i++) {
      for (long v : VALUES) {
        hist.add(v, 1);
      }
      long[] vals = hist.getQuantiles(qs);
      System.out.println(Arrays.toString(vals));
      for (int j = 0; j < qs.length; j++) {
        Assert.assertTrue(j + "-th element org: " + VALUES[j] + ", act: " + vals[j],
          Math.abs(vals[j] - VALUES[j]) <= 10);
      }
      hist.reset();
    }
  }

  @Test
  public void testUniform() {
    FastLongHistogram hist = new FastLongHistogram(100, 0, 50);
    doTestUniform(hist);
  }

  @Test
  public void testAdaptionOfChange() {
    // assumes the uniform distribution
    FastLongHistogram hist = new FastLongHistogram(100, 0, 100);

    Random rand = new Random();

    for (int n = 0; n < 10; n++) {
      for (int i = 0; i < 900; i++) {
        hist.add(rand.nextInt(100), 1);
      }

      // add 10% outliers, this breaks the assumption, hope bin10xMax works
      for (int i = 0; i < 100; i++) {
        hist.add(1000 + rand.nextInt(100), 1);
      }

      long[] vals = hist.getQuantiles(new double[] { 0.25, 0.75, 0.95 });
      System.out.println(Arrays.toString(vals));
      if (n == 0) {
        Assert.assertTrue("Out of possible value", vals[0] >= 0 && vals[0] <= 50);
        Assert.assertTrue("Out of possible value", vals[1] >= 50 && vals[1] <= 100);
        Assert.assertTrue("Out of possible value", vals[2] >= 900 && vals[2] <= 1100);
      }

      hist.reset();
    }
  }


  @Test
  public void testGetNumAtOrBelow() {
    long[] VALUES = { 1, 10, 20, 30, 40, 50 };

    FastLongHistogram h = new FastLongHistogram();
    for (long v : VALUES) {
      for (int i = 0; i < 100; i++) {
        h.add(v, 1);
      }
    }

    h.add(Integer.MAX_VALUE, 1);

    h.reset();

    for (long v : VALUES) {
      for (int i = 0; i < 100; i++) {
        h.add(v, 1);
      }
    }
    // Add something way out there to make sure it doesn't throw off the counts.
    h.add(Integer.MAX_VALUE, 1);

    assertEquals(100, h.getNumAtOrBelow(1));
    assertEquals(200, h.getNumAtOrBelow(11));
    assertEquals(601, h.getNumAtOrBelow(Long.MAX_VALUE));
  }


  @Test
  public void testSameValues() {
    FastLongHistogram hist = new FastLongHistogram(100);

    hist.add(50, 100);

    hist.reset();
    doTestUniform(hist);
  }
}
@@ -197,6 +197,7 @@
                 <include>RowProcessor.proto</include>
                 <include>SecureBulkLoad.proto</include>
                 <include>Snapshot.proto</include>
+                <include>Table.proto</include>
                 <include>Tracing.proto</include>
                 <include>VisibilityLabels.proto</include>
                 <include>WAL.proto</include>
@@ -1285,11 +1285,11 @@ public final class AccessControlProtos {
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
 
     // optional bytes family = 2;
     /**

@@ -1377,11 +1377,11 @@ public final class AccessControlProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = tableName_.toBuilder();
               }
-              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(tableName_);
                 tableName_ = subBuilder.buildPartial();

@@ -1477,7 +1477,7 @@ public final class AccessControlProtos {
     private int bitField0_;
     // optional .hbase.pb.TableName table_name = 1;
     public static final int TABLE_NAME_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+    private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */

@@ -1487,13 +1487,13 @@ public final class AccessControlProtos {
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
       return tableName_;
     }
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
       return tableName_;
     }
 

@@ -1552,7 +1552,7 @@ public final class AccessControlProtos {
     }
 
     private void initFields() {
-      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
       family_ = com.google.protobuf.ByteString.EMPTY;
       qualifier_ = com.google.protobuf.ByteString.EMPTY;
       action_ = java.util.Collections.emptyList();

@@ -1797,7 +1797,7 @@ public final class AccessControlProtos {
       public Builder clear() {
         super.clear();
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
         } else {
           tableNameBuilder_.clear();
         }

@@ -1926,9 +1926,9 @@ public final class AccessControlProtos {
       private int bitField0_;
 
       // optional .hbase.pb.TableName table_name = 1;
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
        */

@@ -1938,7 +1938,7 @@ public final class AccessControlProtos {
       /**
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
        */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {

@@ -1948,7 +1948,7 @@ public final class AccessControlProtos {
       /**
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
        */
-      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();

@@ -1965,7 +1965,7 @@ public final class AccessControlProtos {
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
        */
      public Builder setTableName(
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();

@@ -1978,12 +1978,12 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 1;</code>
       */
-      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
-              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+              tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
            tableName_ =
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }

@@ -1999,7 +1999,7 @@ public final class AccessControlProtos {
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();

@@ -2010,7 +2010,7 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 1;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();

@@ -2018,7 +2018,7 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 1;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {

@@ -2029,11 +2029,11 @@ public final class AccessControlProtos {
       * <code>optional .hbase.pb.TableName table_name = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
@@ -7462,11 +7462,11 @@ public final class AccessControlProtos {
     /**
      * <code>optional .hbase.pb.TableName table_name = 2;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
     /**
      * <code>optional .hbase.pb.TableName table_name = 2;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
 
     // optional bytes namespace_name = 3;
     /**

@@ -7541,11 +7541,11 @@ public final class AccessControlProtos {
               break;
             }
             case 18: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
               if (((bitField0_ & 0x00000002) == 0x00000002)) {
                 subBuilder = tableName_.toBuilder();
               }
-              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(tableName_);
                 tableName_ = subBuilder.buildPartial();

@@ -7616,7 +7616,7 @@ public final class AccessControlProtos {
 
     // optional .hbase.pb.TableName table_name = 2;
     public static final int TABLE_NAME_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+    private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
     /**
      * <code>optional .hbase.pb.TableName table_name = 2;</code>
      */

@@ -7626,13 +7626,13 @@ public final class AccessControlProtos {
     /**
      * <code>optional .hbase.pb.TableName table_name = 2;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
       return tableName_;
     }
     /**
      * <code>optional .hbase.pb.TableName table_name = 2;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
       return tableName_;
     }
 

@@ -7654,7 +7654,7 @@ public final class AccessControlProtos {
 
     private void initFields() {
       type_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Type.Global;
-      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
       namespaceName_ = com.google.protobuf.ByteString.EMPTY;
     }
     private byte memoizedIsInitialized = -1;

@@ -7881,7 +7881,7 @@ public final class AccessControlProtos {
         type_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Permission.Type.Global;
         bitField0_ = (bitField0_ & ~0x00000001);
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
         } else {
           tableNameBuilder_.clear();
         }

@@ -8027,9 +8027,9 @@ public final class AccessControlProtos {
       }
 
       // optional .hbase.pb.TableName table_name = 2;
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>optional .hbase.pb.TableName table_name = 2;</code>
        */

@@ -8039,7 +8039,7 @@ public final class AccessControlProtos {
       /**
        * <code>optional .hbase.pb.TableName table_name = 2;</code>
        */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {

@@ -8049,7 +8049,7 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
-      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();

@@ -8066,7 +8066,7 @@ public final class AccessControlProtos {
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
      public Builder setTableName(
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();

@@ -8079,12 +8079,12 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
-      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
-              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+              tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
            tableName_ =
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }

@@ -8100,7 +8100,7 @@ public final class AccessControlProtos {
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();

@@ -8111,7 +8111,7 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();

@@ -8119,7 +8119,7 @@ public final class AccessControlProtos {
      /**
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {

@@ -8130,11 +8130,11 @@ public final class AccessControlProtos {
       * <code>optional .hbase.pb.TableName table_name = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
@@ -10499,7 +10499,7 @@ public final class AccessControlProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\023AccessControl.proto\022\010hbase.pb\032\013HBase.p" +
+      "\n\023AccessControl.proto\022\010hbase.pb\032\013Table.p" +
      "roto\"\314\002\n\nPermission\022\'\n\004type\030\001 \002(\0162\031.hbas" +
      "e.pb.Permission.Type\0225\n\021global_permissio" +
      "n\030\002 \001(\0132\032.hbase.pb.GlobalPermission\022;\n\024n" +

@@ -10648,7 +10648,7 @@ public final class AccessControlProtos {
     com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
        }, assigner);
  }
 
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -2517,11 +2517,11 @@ public final class SecureBulkLoadProtos {
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+    org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();
   }
   /**
    * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest}

@@ -2575,11 +2575,11 @@ public final class SecureBulkLoadProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = tableName_.toBuilder();
               }
-              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(tableName_);
                 tableName_ = subBuilder.buildPartial();

@@ -2629,7 +2629,7 @@ public final class SecureBulkLoadProtos {
     private int bitField0_;
     // required .hbase.pb.TableName table_name = 1;
     public static final int TABLE_NAME_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+    private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */

@@ -2639,18 +2639,18 @@ public final class SecureBulkLoadProtos {
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
       return tableName_;
     }
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+    public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
       return tableName_;
     }
 
     private void initFields() {
-      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {

@@ -2844,7 +2844,7 @@ public final class SecureBulkLoadProtos {
       public Builder clear() {
         super.clear();
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
         } else {
           tableNameBuilder_.clear();
         }

@@ -2940,9 +2940,9 @@ public final class SecureBulkLoadProtos {
       private int bitField0_;
 
       // required .hbase.pb.TableName table_name = 1;
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+      private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
        */

@@ -2952,7 +2952,7 @@ public final class SecureBulkLoadProtos {
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
        */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {

@@ -2962,7 +2962,7 @@ public final class SecureBulkLoadProtos {
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
-      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();

@@ -2979,7 +2979,7 @@ public final class SecureBulkLoadProtos {
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
      public Builder setTableName(
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();

@@ -2992,12 +2992,12 @@ public final class SecureBulkLoadProtos {
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
-      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
-              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+              tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
            tableName_ =
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }

@@ -3013,7 +3013,7 @@ public final class SecureBulkLoadProtos {
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();

@@ -3024,7 +3024,7 @@ public final class SecureBulkLoadProtos {
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();

@@ -3032,7 +3032,7 @@ public final class SecureBulkLoadProtos {
      /**
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {

@@ -3043,11 +3043,11 @@ public final class SecureBulkLoadProtos {
       * <code>required .hbase.pb.TableName table_name = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+          org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+              org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
@ -4857,30 +4857,31 @@ public final class SecureBulkLoadProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\024SecureBulkLoad.proto\022\010hbase.pb\032\013HBase." +
"proto\032\014Client.proto\"\266\001\n\033SecureBulkLoadHF" +
"ilesRequest\022>\n\013family_path\030\001 \003(\0132).hbase" +
".pb.BulkLoadHFileRequest.FamilyPath\022\026\n\016a" +
"ssign_seq_num\030\002 \001(\010\022+\n\010fs_token\030\003 \002(\0132\031." +
"hbase.pb.DelegationToken\022\022\n\nbulk_token\030\004" +
" \002(\t\".\n\034SecureBulkLoadHFilesResponse\022\016\n\006" +
"loaded\030\001 \002(\010\"V\n\017DelegationToken\022\022\n\nident" +
"ifier\030\001 \001(\014\022\020\n\010password\030\002 \001(\014\022\014\n\004kind\030\003 " +
"\001(\t\022\017\n\007service\030\004 \001(\t\"A\n\026PrepareBulkLoadR",
"equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" +
"bleName\"-\n\027PrepareBulkLoadResponse\022\022\n\nbu" +
"lk_token\030\001 \002(\t\",\n\026CleanupBulkLoadRequest" +
"\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027CleanupBulkLoadR" +
"esponse2\256\002\n\025SecureBulkLoadService\022V\n\017Pre" +
"pareBulkLoad\022 .hbase.pb.PrepareBulkLoadR" +
"equest\032!.hbase.pb.PrepareBulkLoadRespons" +
"e\022e\n\024SecureBulkLoadHFiles\022%.hbase.pb.Sec" +
"ureBulkLoadHFilesRequest\032&.hbase.pb.Secu" +
"reBulkLoadHFilesResponse\022V\n\017CleanupBulkL",
"oad\022 .hbase.pb.CleanupBulkLoadRequest\032!." +
"hbase.pb.CleanupBulkLoadResponseBJ\n*org." +
"apache.hadoop.hbase.protobuf.generatedB\024" +
"SecureBulkLoadProtosH\001\210\001\001\240\001\001"
"\n\024SecureBulkLoad.proto\022\010hbase.pb\032\013Table." +
"proto\032\013HBase.proto\032\014Client.proto\"\266\001\n\033Sec" +
"ureBulkLoadHFilesRequest\022>\n\013family_path\030" +
"\001 \003(\0132).hbase.pb.BulkLoadHFileRequest.Fa" +
"milyPath\022\026\n\016assign_seq_num\030\002 \001(\010\022+\n\010fs_t" +
"oken\030\003 \002(\0132\031.hbase.pb.DelegationToken\022\022\n" +
"\nbulk_token\030\004 \002(\t\".\n\034SecureBulkLoadHFile" +
"sResponse\022\016\n\006loaded\030\001 \002(\010\"V\n\017DelegationT" +
"oken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002 \001" +
"(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"A\n\026Pre",
"pareBulkLoadRequest\022\'\n\ntable_name\030\001 \002(\0132" +
"\023.hbase.pb.TableName\"-\n\027PrepareBulkLoadR" +
"esponse\022\022\n\nbulk_token\030\001 \002(\t\",\n\026CleanupBu" +
"lkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027Cle" +
"anupBulkLoadResponse2\256\002\n\025SecureBulkLoadS" +
"ervice\022V\n\017PrepareBulkLoad\022 .hbase.pb.Pre" +
"pareBulkLoadRequest\032!.hbase.pb.PrepareBu" +
"lkLoadResponse\022e\n\024SecureBulkLoadHFiles\022%" +
".hbase.pb.SecureBulkLoadHFilesRequest\032&." +
"hbase.pb.SecureBulkLoadHFilesResponse\022V\n",
"\017CleanupBulkLoad\022 .hbase.pb.CleanupBulkL" +
"oadRequest\032!.hbase.pb.CleanupBulkLoadRes" +
"ponseBJ\n*org.apache.hadoop.hbase.protobu" +
"f.generatedB\024SecureBulkLoadProtosH\001\210\001\001\240\001" +
"\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

@ -4935,6 +4936,7 @@ public final class SecureBulkLoadProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
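The substantive change in the regenerated descriptor above is the dependency list: SecureBulkLoad.proto now imports Table.proto ahead of HBase.proto, so the hbase.pb.TableName referenced by PrepareBulkLoadRequest resolves from the new standalone module. A minimal usage sketch against the regenerated classes follows; the builder calls are the standard protobuf 2.5 generated API, and the namespace/qualifier values are illustrative only, not taken from the commit:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class PrepareBulkLoadSketch {
  public static void main(String[] args) {
    // table_name (field 1) is now typed against the standalone TableProtos module
    TableProtos.TableName tableName = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("default"))    // illustrative value
        .setQualifier(ByteString.copyFromUtf8("usertable"))  // illustrative value
        .build();
    PrepareBulkLoadRequest request = PrepareBulkLoadRequest.newBuilder()
        .setTableName(tableName)
        .build();
    System.out.println(request);  // text-format dump of the request
  }
}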
File diff suppressed because it is too large
@ -0,0 +1,607 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Table.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class TableProtos {
private TableProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface TableNameOrBuilder
extends com.google.protobuf.MessageOrBuilder {

// required bytes namespace = 1;
/**
* <code>required bytes namespace = 1;</code>
*/
boolean hasNamespace();
/**
* <code>required bytes namespace = 1;</code>
*/
com.google.protobuf.ByteString getNamespace();

// required bytes qualifier = 2;
/**
* <code>required bytes qualifier = 2;</code>
*/
boolean hasQualifier();
/**
* <code>required bytes qualifier = 2;</code>
*/
com.google.protobuf.ByteString getQualifier();
}
/**
* Protobuf type {@code hbase.pb.TableName}
*
* <pre>
**
* Table Name
* </pre>
*/
public static final class TableName extends
com.google.protobuf.GeneratedMessage
implements TableNameOrBuilder {
// Use TableName.newBuilder() to construct.
private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final TableName defaultInstance;
public static TableName getDefaultInstance() {
return defaultInstance;
}

public TableName getDefaultInstanceForType() {
return defaultInstance;
}

private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableName(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
namespace_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
qualifier_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder.class);
}

public static com.google.protobuf.Parser<TableName> PARSER =
new com.google.protobuf.AbstractParser<TableName>() {
public TableName parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TableName(input, extensionRegistry);
}
};

@java.lang.Override
public com.google.protobuf.Parser<TableName> getParserForType() {
return PARSER;
}

private int bitField0_;
// required bytes namespace = 1;
public static final int NAMESPACE_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString namespace_;
/**
* <code>required bytes namespace = 1;</code>
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes namespace = 1;</code>
*/
public com.google.protobuf.ByteString getNamespace() {
return namespace_;
}

// required bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString qualifier_;
/**
* <code>required bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}

private void initFields() {
namespace_ = com.google.protobuf.ByteString.EMPTY;
qualifier_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;

if (!hasNamespace()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasQualifier()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, namespace_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, qualifier_);
}
getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, namespace_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, qualifier_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName other = (org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) obj;

boolean result = true;
result = result && (hasNamespace() == other.hasNamespace());
if (hasNamespace()) {
result = result && getNamespace()
.equals(other.getNamespace());
}
result = result && (hasQualifier() == other.hasQualifier());
if (hasQualifier()) {
result = result && getQualifier()
.equals(other.getQualifier());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}

private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasNamespace()) {
hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
hash = (53 * hash) + getNamespace().hashCode();
}
if (hasQualifier()) {
hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQualifier().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}

public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hbase.pb.TableName}
*
* <pre>
**
* Table Name
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder.class);
}

// Construct using org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}

private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}

public Builder clear() {
super.clear();
namespace_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
qualifier_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}

public Builder clone() {
return create().mergeFrom(buildPartial());
}

public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.internal_static_hbase_pb_TableName_descriptor;
}

public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
}

public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName build() {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}

public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName result = new org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.namespace_ = namespace_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.qualifier_ = qualifier_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}

public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName)other);
} else {
super.mergeFrom(other);
return this;
}
}

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) return this;
if (other.hasNamespace()) {
setNamespace(other.getNamespace());
}
if (other.hasQualifier()) {
setQualifier(other.getQualifier());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}

public final boolean isInitialized() {
if (!hasNamespace()) {

return false;
}
if (!hasQualifier()) {

return false;
}
return true;
}

public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;

// required bytes namespace = 1;
private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes namespace = 1;</code>
*/
public boolean hasNamespace() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes namespace = 1;</code>
*/
public com.google.protobuf.ByteString getNamespace() {
return namespace_;
}
/**
* <code>required bytes namespace = 1;</code>
*/
public Builder setNamespace(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
namespace_ = value;
onChanged();
return this;
}
/**
* <code>required bytes namespace = 1;</code>
*/
public Builder clearNamespace() {
bitField0_ = (bitField0_ & ~0x00000001);
namespace_ = getDefaultInstance().getNamespace();
onChanged();
return this;
}

// required bytes qualifier = 2;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* <code>required bytes qualifier = 2;</code>
*/
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
qualifier_ = value;
onChanged();
return this;
}
/**
* <code>required bytes qualifier = 2;</code>
*/
public Builder clearQualifier() {
bitField0_ = (bitField0_ & ~0x00000002);
qualifier_ = getDefaultInstance().getQualifier();
onChanged();
return this;
}

// @@protoc_insertion_point(builder_scope:hbase.pb.TableName)
}

static {
defaultInstance = new TableName(true);
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:hbase.pb.TableName)
}

private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_TableName_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_TableName_fieldAccessorTable;

public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\013Table.proto\022\010hbase.pb\"1\n\tTableName\022\021\n\t" +
"namespace\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014B>\n*or" +
"g.apache.hadoop.hbase.protobuf.generated" +
"B\013TableProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_hbase_pb_TableName_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_hbase_pb_TableName_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_TableName_descriptor,
new java.lang.String[] { "Namespace", "Qualifier", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}

// @@protoc_insertion_point(outer_class_scope)
}
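From the descriptor bytes in the new file above, Table.proto carries exactly one message: TableName, with required bytes namespace = 1 and required bytes qualifier = 2 — the same shape and field numbers it had inside HBase.proto, which is what keeps the wire format stable between branch-1.3 and branch-1.4. A minimal round-trip sketch against the generated class (the table values are illustrative):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class TableNameRoundTrip {
  public static void main(String[] args) throws Exception {
    TableProtos.TableName original = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("hbase"))
        .setQualifier(ByteString.copyFromUtf8("meta"))
        .build();
    byte[] wire = original.toByteArray();
    // Same tag numbers and field types as the old HBaseProtos.TableName, so
    // these bytes stay parseable no matter which module produced them.
    TableProtos.TableName parsed = TableProtos.TableName.parseFrom(wire);
    System.out.println(parsed.equals(original));  // prints true
  }
}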
@ -8813,11 +8813,11 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();

// required bytes encoded_region_name = 2;
/**

@ -8921,11 +8921,11 @@ public final class WALProtos {
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();

@ -8996,7 +8996,7 @@ public final class WALProtos {
private int bitField0_;
// required .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/

@ -9006,13 +9006,13 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}

@ -9085,7 +9085,7 @@ public final class WALProtos {
}

private void initFields() {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
stores_ = java.util.Collections.emptyList();
bulkloadSeqNum_ = 0L;

@ -9347,7 +9347,7 @@ public final class WALProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}

@ -9516,9 +9516,9 @@ public final class WALProtos {
private int bitField0_;

// required .hbase.pb.TableName table_name = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/

@ -9528,7 +9528,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {

@ -9538,7 +9538,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();

@ -9555,7 +9555,7 @@ public final class WALProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();

@ -9568,12 +9568,12 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}

@ -9589,7 +9589,7 @@ public final class WALProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();

@ -9600,7 +9600,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();

@ -9608,7 +9608,7 @@ public final class WALProtos {
/**
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {

@ -9619,11 +9619,11 @@ public final class WALProtos {
* <code>required .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());

@ -11978,56 +11978,56 @@ public final class WALProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\tWAL.proto\022\010hbase.pb\032\013HBase.proto\032\014Clie" +
"nt.proto\"\217\001\n\tWALHeader\022\027\n\017has_compressio" +
"n\030\001 \001(\010\022\026\n\016encryption_key\030\002 \001(\014\022\033\n\023has_t" +
"ag_compression\030\003 \001(\010\022\027\n\017writer_cls_name\030" +
"\004 \001(\t\022\033\n\023cell_codec_cls_name\030\005 \001(\t\"\273\002\n\006W" +
"ALKey\022\033\n\023encoded_region_name\030\001 \002(\014\022\022\n\nta" +
"ble_name\030\002 \002(\014\022\033\n\023log_sequence_number\030\003 " +
"\002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\ncluster_id\030\005 " +
"\001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132" +
"\025.hbase.pb.FamilyScope\022\032\n\022following_kv_c",
"ount\030\007 \001(\r\022#\n\013cluster_ids\030\010 \003(\0132\016.hbase." +
"pb.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001" +
"(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"F\n\013Fami" +
"lyScope\022\016\n\006family\030\001 \002(\014\022\'\n\nscope_type\030\002 " +
"\002(\0162\023.hbase.pb.ScopeType\"\276\001\n\024CompactionD" +
"escriptor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded" +
"_region_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022" +
"\030\n\020compaction_input\030\004 \003(\t\022\031\n\021compaction_" +
"output\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013" +
"region_name\030\007 \001(\014\"\244\003\n\017FlushDescriptor\0225\n",
"\006action\030\001 \002(\0162%.hbase.pb.FlushDescriptor" +
".FlushAction\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023enco" +
"ded_region_name\030\003 \002(\014\022\035\n\025flush_sequence_" +
"number\030\004 \001(\004\022E\n\rstore_flushes\030\005 \003(\0132..hb" +
"ase.pb.FlushDescriptor.StoreFlushDescrip" +
"tor\022\023\n\013region_name\030\006 \001(\014\032Y\n\024StoreFlushDe" +
"scriptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_h" +
"ome_dir\030\002 \002(\t\022\024\n\014flush_output\030\003 \003(\t\"S\n\013F" +
"lushAction\022\017\n\013START_FLUSH\020\000\022\020\n\014COMMIT_FL" +
"USH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003",
"\"q\n\017StoreDescriptor\022\023\n\013family_name\030\001 \002(\014" +
"\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n\nstore_file\030\003" +
" \003(\t\022\035\n\025store_file_size_bytes\030\004 \001(\004\"\237\001\n\022" +
"BulkLoadDescriptor\022\'\n\ntable_name\030\001 \002(\0132\023" +
".hbase.pb.TableName\022\033\n\023encoded_region_na" +
"me\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031.hbase.pb.Stor" +
"eDescriptor\022\030\n\020bulkload_seq_num\030\004 \002(\003\"\272\002" +
"\n\025RegionEventDescriptor\022=\n\nevent_type\030\001 " +
"\002(\0162).hbase.pb.RegionEventDescriptor.Eve" +
"ntType\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023encoded_re",
"gion_name\030\003 \002(\014\022\033\n\023log_sequence_number\030\004" +
" \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbase.pb.StoreDes" +
"criptor\022$\n\006server\030\006 \001(\0132\024.hbase.pb.Serve" +
"rName\022\023\n\013region_name\030\007 \001(\014\".\n\tEventType\022" +
"\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_CLOSE\020\001\"\014\n\nWA" +
"LTrailer*d\n\tScopeType\022\033\n\027REPLICATION_SCO" +
"PE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLOBAL\020\001" +
"\022\034\n\030REPLICATION_SCOPE_SERIAL\020\002B?\n*org.ap" +
"ache.hadoop.hbase.protobuf.generatedB\tWA" +
"LProtosH\001\210\001\000\240\001\001"
"\n\tWAL.proto\022\010hbase.pb\032\013Table.proto\032\013HBas" +
"e.proto\032\014Client.proto\"\217\001\n\tWALHeader\022\027\n\017h" +
"as_compression\030\001 \001(\010\022\026\n\016encryption_key\030\002" +
" \001(\014\022\033\n\023has_tag_compression\030\003 \001(\010\022\027\n\017wri" +
"ter_cls_name\030\004 \001(\t\022\033\n\023cell_codec_cls_nam" +
"e\030\005 \001(\t\"\273\002\n\006WALKey\022\033\n\023encoded_region_nam" +
"e\030\001 \002(\014\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023log_seque" +
"nce_number\030\003 \002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\n" +
"cluster_id\030\005 \001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006" +
"scopes\030\006 \003(\0132\025.hbase.pb.FamilyScope\022\032\n\022f",
"ollowing_kv_count\030\007 \001(\r\022#\n\013cluster_ids\030\010" +
" \003(\0132\016.hbase.pb.UUID\022\022\n\nnonceGroup\030\t \001(\004" +
"\022\r\n\005nonce\030\n \001(\004\022\034\n\024orig_sequence_number\030" +
"\013 \001(\004\"F\n\013FamilyScope\022\016\n\006family\030\001 \002(\014\022\'\n\n" +
"scope_type\030\002 \002(\0162\023.hbase.pb.ScopeType\"\276\001" +
"\n\024CompactionDescriptor\022\022\n\ntable_name\030\001 \002" +
"(\014\022\033\n\023encoded_region_name\030\002 \002(\014\022\023\n\013famil" +
"y_name\030\003 \002(\014\022\030\n\020compaction_input\030\004 \003(\t\022\031" +
"\n\021compaction_output\030\005 \003(\t\022\026\n\016store_home_" +
"dir\030\006 \002(\t\022\023\n\013region_name\030\007 \001(\014\"\244\003\n\017Flush",
"Descriptor\0225\n\006action\030\001 \002(\0162%.hbase.pb.Fl" +
"ushDescriptor.FlushAction\022\022\n\ntable_name\030" +
"\002 \002(\014\022\033\n\023encoded_region_name\030\003 \002(\014\022\035\n\025fl" +
"ush_sequence_number\030\004 \001(\004\022E\n\rstore_flush" +
"es\030\005 \003(\0132..hbase.pb.FlushDescriptor.Stor" +
"eFlushDescriptor\022\023\n\013region_name\030\006 \001(\014\032Y\n" +
"\024StoreFlushDescriptor\022\023\n\013family_name\030\001 \002" +
"(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\024\n\014flush_outp" +
"ut\030\003 \003(\t\"S\n\013FlushAction\022\017\n\013START_FLUSH\020\000" +
"\022\020\n\014COMMIT_FLUSH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014C",
"ANNOT_FLUSH\020\003\"q\n\017StoreDescriptor\022\023\n\013fami" +
"ly_name\030\001 \002(\014\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n" +
"\nstore_file\030\003 \003(\t\022\035\n\025store_file_size_byt" +
"es\030\004 \001(\004\"\237\001\n\022BulkLoadDescriptor\022\'\n\ntable" +
"_name\030\001 \002(\0132\023.hbase.pb.TableName\022\033\n\023enco" +
"ded_region_name\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031." +
"hbase.pb.StoreDescriptor\022\030\n\020bulkload_seq" +
"_num\030\004 \002(\003\"\272\002\n\025RegionEventDescriptor\022=\n\n" +
"event_type\030\001 \002(\0162).hbase.pb.RegionEventD" +
"escriptor.EventType\022\022\n\ntable_name\030\002 \002(\014\022",
"\033\n\023encoded_region_name\030\003 \002(\014\022\033\n\023log_sequ" +
"ence_number\030\004 \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbas" +
"e.pb.StoreDescriptor\022$\n\006server\030\006 \001(\0132\024.h" +
"base.pb.ServerName\022\023\n\013region_name\030\007 \001(\014\"" +
".\n\tEventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_" +
"CLOSE\020\001\"\014\n\nWALTrailer*d\n\tScopeType\022\033\n\027RE" +
"PLICATION_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_S" +
"COPE_GLOBAL\020\001\022\034\n\030REPLICATION_SCOPE_SERIA" +
"L\020\002B?\n*org.apache.hadoop.hbase.protobuf." +
"generatedB\tWALProtosH\001\210\001\000\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

@ -12100,6 +12100,7 @@ public final class WALProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
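WAL.proto picks up the same Table.proto import. Of its messages, only BulkLoadDescriptor references hbase.pb.TableName as an embedded message; WALKey, CompactionDescriptor, FlushDescriptor, and RegionEventDescriptor keep table_name as raw bytes, as the descriptor above shows. A sketch of building the descriptor against the regenerated classes — setter names follow the generated builder convention and the field values are illustrative:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;

public class BulkLoadDescriptorSketch {
  public static void main(String[] args) {
    BulkLoadDescriptor descriptor = BulkLoadDescriptor.newBuilder()
        // the Builder overload of setTableName shown in the diff above
        .setTableName(TableProtos.TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))
            .setQualifier(ByteString.copyFromUtf8("t1")))
        .setEncodedRegionName(ByteString.copyFromUtf8("region"))  // illustrative
        .setBulkloadSeqNum(1L)                                    // required int64 field
        .build();
    System.out.println(descriptor.getTableName().getQualifier().toStringUtf8());
  }
}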
@ -5043,11 +5043,11 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();

// repeated bytes families = 2;
/**

@ -5115,11 +5115,11 @@ public final class ZooKeeperProtos {
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();

@ -5180,7 +5180,7 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/

@ -5190,13 +5190,13 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}

@ -5224,7 +5224,7 @@ public final class ZooKeeperProtos {
}

private void initFields() {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
families_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;

@ -5435,7 +5435,7 @@ public final class ZooKeeperProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}

@ -5546,9 +5546,9 @@ public final class ZooKeeperProtos {
private int bitField0_;

// optional .hbase.pb.TableName table_name = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/

@ -5558,7 +5558,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {

@ -5568,7 +5568,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();

@ -5585,7 +5585,7 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();

@ -5598,12 +5598,12 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
} else {
tableName_ = value;
}

@ -5619,7 +5619,7 @@ public final class ZooKeeperProtos {
*/
public Builder clearTableName() {
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
onChanged();
} else {
tableNameBuilder_.clear();

@ -5630,7 +5630,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTableNameFieldBuilder().getBuilder();

@ -5638,7 +5638,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
if (tableNameBuilder_ != null) {
return tableNameBuilder_.getMessageOrBuilder();
} else {

@ -5649,11 +5649,11 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
getTableNameFieldBuilder() {
if (tableNameBuilder_ == null) {
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
tableName_,
getParentForChildren(),
isClean());
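The mergeTableName path above — newBuilder(tableName_).mergeFrom(value).buildPartial() — is ordinary proto2 merge semantics: fields set on the incoming message overwrite, unset fields are preserved. A small sketch of that behavior with the new TableName (buildPartial skips the required-field check on the deliberately incomplete overlay; the values are illustrative):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

public class MergeTableNameSketch {
  public static void main(String[] args) {
    TableProtos.TableName base = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFromUtf8("ns"))
        .setQualifier(ByteString.copyFromUtf8("old"))
        .build();
    TableProtos.TableName overlay = TableProtos.TableName.newBuilder()
        .setQualifier(ByteString.copyFromUtf8("new"))
        .buildPartial();  // namespace intentionally left unset
    TableProtos.TableName merged =
        TableProtos.TableName.newBuilder(base).mergeFrom(overlay).build();
    // namespace survives from base; qualifier is taken from the overlay
    System.out.println(merged.getNamespace().toStringUtf8()
        + ":" + merged.getQualifier().toStringUtf8());  // prints ns:new
  }
}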
@ -9260,11 +9260,11 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName();
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder();

// optional .hbase.pb.ServerName lock_owner = 2;
/**

@ -9382,11 +9382,11 @@ public final class ZooKeeperProtos {
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = tableName_.toBuilder();
}
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tableName_);
tableName_ = subBuilder.buildPartial();

@ -9469,7 +9469,7 @@ public final class ZooKeeperProtos {
private int bitField0_;
// optional .hbase.pb.TableName table_name = 1;
public static final int TABLE_NAME_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/

@ -9479,13 +9479,13 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
return tableName_;
}
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
return tableName_;
}

@ -9603,7 +9603,7 @@ public final class ZooKeeperProtos {
}

private void initFields() {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
threadId_ = 0L;
isShared_ = false;

@ -9892,7 +9892,7 @@ public final class ZooKeeperProtos {
public Builder clear() {
super.clear();
if (tableNameBuilder_ == null) {
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
} else {
tableNameBuilder_.clear();
}

@ -10047,9 +10047,9 @@ public final class ZooKeeperProtos {
private int bitField0_;

// optional .hbase.pb.TableName table_name = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
private org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder> tableNameBuilder_;
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/

@ -10059,7 +10059,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName getTableName() {
if (tableNameBuilder_ == null) {
return tableName_;
} else {

@ -10069,7 +10069,7 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (value == null) {
throw new NullPointerException();

@ -10086,7 +10086,7 @@ public final class ZooKeeperProtos {
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder setTableName(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder builderForValue) {
if (tableNameBuilder_ == null) {
tableName_ = builderForValue.build();
onChanged();

@ -10099,12 +10099,12 @@ public final class ZooKeeperProtos {
/**
* <code>optional .hbase.pb.TableName table_name = 1;</code>
*/
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName value) {
if (tableNameBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
tableName_ != org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance()) {
tableName_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
||||
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
|
||||
} else {
|
||||
tableName_ = value;
|
||||
}
|
||||
|
@ -10120,7 +10120,7 @@ public final class ZooKeeperProtos {
|
|||
*/
|
||||
public Builder clearTableName() {
|
||||
if (tableNameBuilder_ == null) {
|
||||
tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
|
||||
tableName_ = org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.getDefaultInstance();
|
||||
onChanged();
|
||||
} else {
|
||||
tableNameBuilder_.clear();
|
||||
|
@ -10131,7 +10131,7 @@ public final class ZooKeeperProtos {
|
|||
/**
|
||||
* <code>optional .hbase.pb.TableName table_name = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
|
||||
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder getTableNameBuilder() {
|
||||
bitField0_ |= 0x00000001;
|
||||
onChanged();
|
||||
return getTableNameFieldBuilder().getBuilder();
|
||||
|
@ -10139,7 +10139,7 @@ public final class ZooKeeperProtos {
|
|||
/**
|
||||
* <code>optional .hbase.pb.TableName table_name = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
|
||||
public org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder getTableNameOrBuilder() {
|
||||
if (tableNameBuilder_ != null) {
|
||||
return tableNameBuilder_.getMessageOrBuilder();
|
||||
} else {
|
||||
|
@ -10150,11 +10150,11 @@ public final class ZooKeeperProtos {
|
|||
* <code>optional .hbase.pb.TableName table_name = 1;</code>
|
||||
*/
|
||||
private com.google.protobuf.SingleFieldBuilder<
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
|
||||
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>
|
||||
getTableNameFieldBuilder() {
|
||||
if (tableNameBuilder_ == null) {
|
||||
tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
|
||||
org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.TableProtos.TableNameOrBuilder>(
|
||||
tableName_,
|
||||
getParentForChildren(),
|
||||
isClean());
|
||||
|
@ -10982,47 +10982,47 @@ public final class ZooKeeperProtos {
|
|||
descriptor;
|
||||
static {
|
||||
java.lang.String[] descriptorData = {
|
||||
"\n\017ZooKeeper.proto\022\010hbase.pb\032\013HBase.proto" +
"\032\023ClusterStatus.proto\"y\n\020MetaRegionServe" +
"r\022$\n\006server\030\001 \002(\0132\024.hbase.pb.ServerName\022" +
"\023\n\013rpc_version\030\002 \001(\r\022*\n\005state\030\003 \001(\0162\033.hb" +
"ase.pb.RegionState.State\"V\n\006Master\022$\n\006ma" +
"ster\030\001 \002(\0132\024.hbase.pb.ServerName\022\023\n\013rpc_" +
"version\030\002 \001(\r\022\021\n\tinfo_port\030\003 \001(\r\"\037\n\tClus" +
"terUp\022\022\n\nstart_date\030\001 \002(\t\"\221\001\n\020RegionTran" +
"sition\022\027\n\017event_type_code\030\001 \002(\r\022\023\n\013regio" +
"n_name\030\002 \002(\014\022\023\n\013create_time\030\003 \002(\004\022)\n\013ser",
"ver_name\030\004 \002(\0132\024.hbase.pb.ServerName\022\017\n\007" +
"payload\030\005 \001(\014\"\247\002\n\014SplitLogTask\022+\n\005state\030" +
"\001 \002(\0162\034.hbase.pb.SplitLogTask.State\022)\n\013s" +
"erver_name\030\002 \002(\0132\024.hbase.pb.ServerName\022:" +
"\n\004mode\030\003 \001(\0162#.hbase.pb.SplitLogTask.Rec" +
"overyMode:\007UNKNOWN\"C\n\005State\022\016\n\nUNASSIGNE" +
"D\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020\002\022\010\n\004DONE\020\003\022\007" +
"\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007UNKNOWN\020\000\022\021\n\r" +
"LOG_SPLITTING\020\001\022\016\n\nLOG_REPLAY\020\002\"w\n\005Table" +
"\022-\n\005state\030\001 \002(\0162\025.hbase.pb.Table.State:\007",
"ENABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLE" +
"D\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"D\n\007Tabl" +
"eCF\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.pb.Table" +
"Name\022\020\n\010families\030\002 \003(\014\"\330\001\n\017ReplicationPe" +
"er\022\022\n\nclusterkey\030\001 \002(\t\022\037\n\027replicationEnd" +
"pointImpl\030\002 \001(\t\022&\n\004data\030\003 \003(\0132\030.hbase.pb" +
".BytesBytesPair\022/\n\rconfiguration\030\004 \003(\0132\030" +
".hbase.pb.NameStringPair\022$\n\ttable_cfs\030\005 " +
"\003(\0132\021.hbase.pb.TableCF\022\021\n\tbandwidth\030\006 \001(" +
"\003\"g\n\020ReplicationState\022/\n\005state\030\001 \002(\0162 .h",
"base.pb.ReplicationState.State\"\"\n\005State\022" +
"\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020\001\"+\n\027Replicatio" +
"nHLogPosition\022\020\n\010position\030\001 \002(\003\"%\n\017Repli" +
"cationLock\022\022\n\nlock_owner\030\001 \002(\t\"\252\001\n\tTable" +
"Lock\022\'\n\ntable_name\030\001 \001(\0132\023.hbase.pb.Tabl" +
"eName\022(\n\nlock_owner\030\002 \001(\0132\024.hbase.pb.Ser" +
"verName\022\021\n\tthread_id\030\003 \001(\003\022\021\n\tis_shared\030" +
"\004 \001(\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013create_time\030\006 " +
"\001(\003\"\036\n\013SwitchState\022\017\n\007enabled\030\001 \001(\010BE\n*o" +
"rg.apache.hadoop.hbase.protobuf.generate",
"dB\017ZooKeeperProtosH\001\210\001\001\240\001\001"
"\n\017ZooKeeper.proto\022\010hbase.pb\032\013Table.proto" +
"\032\013HBase.proto\032\023ClusterStatus.proto\"y\n\020Me" +
"taRegionServer\022$\n\006server\030\001 \002(\0132\024.hbase.p" +
"b.ServerName\022\023\n\013rpc_version\030\002 \001(\r\022*\n\005sta" +
"te\030\003 \001(\0162\033.hbase.pb.RegionState.State\"V\n" +
"\006Master\022$\n\006master\030\001 \002(\0132\024.hbase.pb.Serve" +
"rName\022\023\n\013rpc_version\030\002 \001(\r\022\021\n\tinfo_port\030" +
"\003 \001(\r\"\037\n\tClusterUp\022\022\n\nstart_date\030\001 \002(\t\"\221" +
"\001\n\020RegionTransition\022\027\n\017event_type_code\030\001" +
" \002(\r\022\023\n\013region_name\030\002 \002(\014\022\023\n\013create_time",
"\030\003 \002(\004\022)\n\013server_name\030\004 \002(\0132\024.hbase.pb.S" +
"erverName\022\017\n\007payload\030\005 \001(\014\"\247\002\n\014SplitLogT" +
"ask\022+\n\005state\030\001 \002(\0162\034.hbase.pb.SplitLogTa" +
"sk.State\022)\n\013server_name\030\002 \002(\0132\024.hbase.pb" +
".ServerName\022:\n\004mode\030\003 \001(\0162#.hbase.pb.Spl" +
"itLogTask.RecoveryMode:\007UNKNOWN\"C\n\005State" +
"\022\016\n\nUNASSIGNED\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020" +
"\002\022\010\n\004DONE\020\003\022\007\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007" +
"UNKNOWN\020\000\022\021\n\rLOG_SPLITTING\020\001\022\016\n\nLOG_REPL" +
"AY\020\002\"w\n\005Table\022-\n\005state\030\001 \002(\0162\025.hbase.pb.",
"Table.State:\007ENABLED\"?\n\005State\022\013\n\007ENABLED" +
"\020\000\022\014\n\010DISABLED\020\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABL" +
"ING\020\003\"D\n\007TableCF\022\'\n\ntable_name\030\001 \001(\0132\023.h" +
"base.pb.TableName\022\020\n\010families\030\002 \003(\014\"\330\001\n\017" +
"ReplicationPeer\022\022\n\nclusterkey\030\001 \002(\t\022\037\n\027r" +
"eplicationEndpointImpl\030\002 \001(\t\022&\n\004data\030\003 \003" +
"(\0132\030.hbase.pb.BytesBytesPair\022/\n\rconfigur" +
"ation\030\004 \003(\0132\030.hbase.pb.NameStringPair\022$\n" +
"\ttable_cfs\030\005 \003(\0132\021.hbase.pb.TableCF\022\021\n\tb" +
"andwidth\030\006 \001(\003\"g\n\020ReplicationState\022/\n\005st",
"ate\030\001 \002(\0162 .hbase.pb.ReplicationState.St" +
"ate\"\"\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020\001\"" +
"+\n\027ReplicationHLogPosition\022\020\n\010position\030\001" +
" \002(\003\"%\n\017ReplicationLock\022\022\n\nlock_owner\030\001 " +
"\002(\t\"\252\001\n\tTableLock\022\'\n\ntable_name\030\001 \001(\0132\023." +
"hbase.pb.TableName\022(\n\nlock_owner\030\002 \001(\0132\024" +
".hbase.pb.ServerName\022\021\n\tthread_id\030\003 \001(\003\022" +
"\021\n\tis_shared\030\004 \001(\010\022\017\n\007purpose\030\005 \001(\t\022\023\n\013c" +
"reate_time\030\006 \001(\003\"\036\n\013SwitchState\022\017\n\007enabl" +
"ed\030\001 \001(\010BE\n*org.apache.hadoop.hbase.prot",
"obuf.generatedB\017ZooKeeperProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@ -11113,6 +11113,7 @@ public final class ZooKeeperProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.TableProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
}, assigner);

@ -23,7 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "HBase.proto";
import "Table.proto";

message Permission {
enum Action {

@ -25,14 +25,8 @@ option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Cell.proto";

/**
* Table Name
*/
message TableName {
required bytes namespace = 1;
required bytes qualifier = 2;
}
import "Table.proto";
import "AccessControl.proto";

/**
* Table Schema
@ -221,3 +215,21 @@ message RegionServerInfo {
optional int32 infoPort = 1;
optional VersionInfo version_info = 2;
}

/**
* Description of the snapshot to take
*/
message SnapshotDescription {
required string name = 1;
optional string table = 2; // not needed for delete, but checked for in taking snapshot
optional int64 creation_time = 3 [default = 0];
enum Type {
DISABLED = 0;
FLUSH = 1;
SKIPFLUSH = 2;
}
optional Type type = 4 [default = FLUSH];
optional int32 version = 5;
optional string owner = 6;
optional UsersAndPermissions users_and_permissions = 7;
}
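
With SnapshotDescription back in HBase.proto, callers construct it through the regenerated HBaseProtos outer class. A minimal sketch, with placeholder snapshot and table names; the same builder pattern appears in the TestRegionSnapshotTask hunk near the end of this diff:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    // Flush-type snapshot description for a hypothetical table "t1".
    HBaseProtos.SnapshotDescription snapshot =
        HBaseProtos.SnapshotDescription.newBuilder()
            .setName("snap_t1")                                   // required field
            .setTable("t1")                                       // optional; checked when taking a snapshot
            .setType(HBaseProtos.SnapshotDescription.Type.FLUSH)  // default is FLUSH
            .build();
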
@ -26,6 +26,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Table.proto";
import "HBase.proto";
import "Client.proto";
import "ClusterStatus.proto";

@ -23,6 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Table.proto";
import "HBase.proto";
import "RPC.proto";

@ -23,6 +23,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import 'Table.proto';
import 'HBase.proto';
import 'Client.proto';

@ -23,28 +23,9 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "AccessControl.proto";
import "FS.proto";
import "HBase.proto";

/**
* Description of the snapshot to take
*/
message SnapshotDescription {
required string name = 1;
optional string table = 2; // not needed for delete, but checked for in taking snapshot
optional int64 creation_time = 3 [default = 0];
enum Type {
DISABLED = 0;
FLUSH = 1;
SKIPFLUSH = 2;
}
optional Type type = 4 [default = FLUSH];
optional int32 version = 5;
optional string owner = 6;
optional UsersAndPermissions users_and_permissions = 7;
}

message SnapshotFileInfo {
enum Type {
HFILE = 1;

@ -0,0 +1,33 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// This file contains protocol buffers that are shared throughout HBase
package hbase.pb;

option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "TableProtos";
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

/**
* Table Name
*/
message TableName {
required bytes namespace = 1;
required bytes qualifier = 2;
}
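
The message definition itself is unchanged; only its owning module and generated outer class differ. A minimal sketch of building the relocated message from a client-side TableName (the table name is a placeholder), mirroring the TestHRegionInfo hunk later in this diff:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

    TableName tableName = TableName.valueOf("default", "t1");
    // Both fields are required bytes, taken from the client-side TableName.
    TableProtos.TableName pb = TableProtos.TableName.newBuilder()
        .setNamespace(ByteString.copyFrom(tableName.getNamespace()))
        .setQualifier(ByteString.copyFrom(tableName.getQualifier()))
        .build();
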
@ -23,6 +23,7 @@ option java_generic_services = false;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Table.proto";
import "HBase.proto";
import "Client.proto";

@ -26,6 +26,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Table.proto";
import "HBase.proto";
import "ClusterStatus.proto";

@ -49,7 +49,7 @@ org.apache.hadoop.hbase.HTableDescriptor;
org.apache.hadoop.hbase.HBaseConfiguration;
org.apache.hadoop.hbase.TableName;
org.apache.hadoop.hbase.tool.Canary;
org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
org.apache.hadoop.hbase.master.DeadServer;
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;

@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

import java.io.IOException;
import java.util.List;

@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

import java.io.IOException;
import java.util.List;

@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
@ -56,7 +57,6 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

/**
* Defines coprocessor hooks for interacting with operations on the

@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.Operation;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
@ -55,7 +56,6 @@ import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;

@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

/**
* Provides the coprocessor framework and environment for master oriented

@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStor
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
@ -185,7 +186,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Repor
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
@ -903,7 +903,7 @@ public class MasterRpcServices extends RSRpcServices
List<TableName> tableNameList = null;
if (req.getTableNamesCount() > 0) {
tableNameList = new ArrayList<TableName>(req.getTableNamesCount());
for (HBaseProtos.TableName tableNamePB: req.getTableNamesList()) {
for (TableProtos.TableName tableNamePB: req.getTableNamesList()) {
tableNameList.add(ProtobufUtil.toTableName(tableNamePB));
}
}
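
ProtobufUtil.toTableName(...) now accepts the TableProtos variant, so call sites like the loop above only need the import swap. A minimal round-trip sketch; toProtoTableName is assumed to be the matching reverse helper in this branch, and the names are placeholders:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.TableProtos;

    // Client-side TableName to protobuf and back.
    TableProtos.TableName pb = ProtobufUtil.toProtoTableName(TableName.valueOf("ns", "t1"));
    TableName roundTripped = ProtobufUtil.toTableName(pb);
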
@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

/**
* Watch the current snapshot under process

@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.CreateTableHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;

@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.FSUtils;

@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.procedure.Procedure;
import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.util.Pair;

@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;

@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.TableEventHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;

@ -62,8 +62,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;
import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinatorRpcs;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.hadoop.hbase.security.AccessDeniedException;

@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.master.TableLockManager;
import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.querymatcher;
package org.apache.hadoop.hbase.regionserver;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;

@ -156,7 +156,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;

@ -73,9 +73,9 @@ import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.Region.Operation;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType;

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

/**

@ -32,10 +32,11 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

/**

@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

/**

@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

@ -22,6 +22,7 @@ import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

@ -26,10 +26,11 @@ import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;

/**
* A query matcher that is specifically designed for the scan case.

@ -22,6 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

/**

@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.procedure.ProcedureMember;
import org.apache.hadoop.hbase.procedure.Subprocedure;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.FlushResult;

@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.procedure.RegionServerProcedureManager;
import org.apache.hadoop.hbase.procedure.Subprocedure;
import org.apache.hadoop.hbase.procedure.SubprocedureFactory;
import org.apache.hadoop.hbase.procedure.ZKProcedureMemberRpcs;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;

@ -91,10 +91,10 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.Region;

@ -90,13 +90,13 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.Superusers;

@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import java.util.Arrays;
import java.util.Locale;

@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.io.FileLink;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.WALLink;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.util.FSUtils;

@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;

@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.access.TablePermission;

@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;

@ -36,7 +36,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;

@ -40,7 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.ByteStringer;

@ -39,7 +39,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;

@ -24,7 +24,7 @@
import="org.apache.hadoop.hbase.client.HConnectionManager"
import="org.apache.hadoop.hbase.master.HMaster"
import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription"
import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription"
import="org.apache.hadoop.util.StringUtils"
import="org.apache.hadoop.hbase.TableName"
import="org.apache.hadoop.hbase.HBaseConfiguration" %>

@ -27,7 +27,7 @@
import="org.apache.hadoop.hbase.HBaseConfiguration"
import="org.apache.hadoop.hbase.client.Admin"
import="org.apache.hadoop.hbase.master.HMaster"
import="org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription"
import="org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription"
import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
import="org.apache.hadoop.hbase.TableName"
import="org.apache.hadoop.util.StringUtils" %>

@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;

@ -59,10 +59,10 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;

@ -34,7 +34,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.TableProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.MD5Hash;
@ -240,7 +240,7 @@ public class TestHRegionInfo {

// test convert RegionInfo without replicaId
RegionInfo info = RegionInfo.newBuilder()
.setTableName(HBaseProtos.TableName.newBuilder()
.setTableName(TableProtos.TableName.newBuilder()
.setQualifier(ByteString.copyFrom(tableName.getQualifier()))
.setNamespace(ByteString.copyFrom(tableName.getNamespace()))
.build())

@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher;
import static org.junit.Assert.*;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;

@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;

@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;

@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;

@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;

@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;

|
@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
|
|||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.client.Table;
|
||||
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos;
|
||||
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
@@ -108,10 +109,9 @@ public class TestRegionSnapshotTask {

     List<HRegion> hRegions = TEST_UTIL.getHBaseCluster().getRegions(tableName);

-    final SnapshotProtos.SnapshotDescription snapshot =
-        SnapshotProtos.SnapshotDescription.newBuilder()
+    final HBaseProtos.SnapshotDescription snapshot = HBaseProtos.SnapshotDescription.newBuilder()
         .setTable(tableName.getNameAsString())
-        .setType(SnapshotProtos.SnapshotDescription.Type.FLUSH)
+        .setType(HBaseProtos.SnapshotDescription.Type.FLUSH)
         .setName("test_table_snapshot")
         .setVersion(SnapshotManifestV2.DESCRIPTOR_VERSION)
         .build();
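The builder chain above is the one place in this hunk where more than an import changes, and even there only the owning outer class moves. A minimal standalone sketch of the same pattern (table and snapshot names are placeholders; 2 corresponds to SnapshotManifestV2.DESCRIPTOR_VERSION in this codebase):

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

    public class SnapshotDescriptionExample {
      static HBaseProtos.SnapshotDescription flushSnapshot() {
        // Same builder chain as the test, with placeholder names.
        return HBaseProtos.SnapshotDescription.newBuilder()
            .setTable("demo_table")
            .setType(HBaseProtos.SnapshotDescription.Type.FLUSH)
            .setName("demo_snapshot")
            .setVersion(2) // SnapshotManifestV2.DESCRIPTOR_VERSION
            .build();
      }
    }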
@@ -161,7 +161,7 @@ public class TestRegionSnapshotTask {
     SnapshotReferenceUtil.verifySnapshot(conf, fs, manifest);
   }

-  private void addRegionToSnapshot(SnapshotProtos.SnapshotDescription snapshot,
+  private void addRegionToSnapshot(HBaseProtos.SnapshotDescription snapshot,
       HRegion region, SnapshotManifest manifest) throws Exception {
     LOG.info("Adding region to snapshot: " + region.getRegionInfo().getRegionNameAsString());
     Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshot, rootDir);
@@ -171,7 +171,7 @@ public class TestRegionSnapshotTask {
   }

   private SnapshotManifest.RegionVisitor createRegionVisitorWithDelay(
-      SnapshotProtos.SnapshotDescription desc, Path workingDir) {
+      HBaseProtos.SnapshotDescription desc, Path workingDir) {
     return new SnapshotManifestV2.ManifestBuilder(conf, fs, workingDir) {
       @Override
       public void storeFile(final SnapshotProtos.SnapshotRegionManifest.Builder region,
@@ -23,7 +23,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.io.HFileLink;
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.TestTableName;
 import org.junit.After;
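This coprocessor test file is central to the compatibility story: SnapshotDescription appears in MasterObserver callback signatures, so its package is part of the coprocessor ABI. A hedged sketch (the observer class and log line are invented; the signature follows the 1.x MasterObserver API) of a coprocessor that only links while the type lives in HBaseProtos:

    import java.io.IOException;

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

    public class LoggingSnapshotObserver extends BaseMasterObserver {
      // A coprocessor jar compiled against branch-1.3 resolves this
      // signature only if SnapshotDescription stays in HBaseProtos.
      @Override
      public void preSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
          SnapshotDescription snapshot, HTableDescriptor desc) throws IOException {
        System.out.println("About to snapshot " + snapshot.getName());
      }
    }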
@@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.junit.After;
@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
-import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
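Note the asymmetry in this final hunk: only SnapshotDescription changes modules, while SnapshotDataManifest and SnapshotRegionManifest remain in SnapshotProtos, so files like this one now import from both generated classes. A small illustrative sketch (class and method names are invented) of code straddling the two modules:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;

    public class ManifestInspector {
      // The description comes from HBaseProtos, the manifest from
      // SnapshotProtos; region_manifests is a repeated field, hence the
      // generated getRegionManifestsCount() accessor.
      static String describe(SnapshotDescription desc, SnapshotDataManifest m) {
        return desc.getName() + " covers " + m.getRegionManifestsCount() + " region(s)";
      }
    }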