HBASE-25174 Remove deprecated fields in HConstants (#2558)

Remove the deprecated fields that can be removed in 3.0.0. Mark the
constant OLDEST_TIMESTAMP as InterfaceAudience.Private by moving it to the
new PrivateConstants class, as it is only used in classes that are also
marked as InterfaceAudience.Private.

Signed-off-by: Viraj Jasani <vjasani@apache.org>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Jan Hentschel 2021-04-03 17:12:16 +02:00 committed by GitHub
parent 5a63fe65aa
commit 048ca4e43f
13 changed files with 58 additions and 79 deletions
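
For callers inside HBase that still referenced the removed constant, the practical effect is a one-line change per call site; a minimal before/after sketch (classes as they appear in the diff below, HBase 3.0.0 assumed):

    // Before this commit (deprecated since hbase-1.3.0):
    long fakeCellTs = org.apache.hadoop.hbase.HConstants.OLDEST_TIMESTAMP;

    // After this commit (internal-only holder class):
    long fakeCellTs = org.apache.hadoop.hbase.PrivateConstants.OLDEST_TIMESTAMP;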

HConstants.java

@@ -25,7 +25,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.UUID;
-import java.util.regex.Pattern;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -672,16 +671,6 @@ public final class HConstants {
    */
   public static final long LATEST_TIMESTAMP = Long.MAX_VALUE;
-  /**
-   * Timestamp to use when we want to refer to the oldest cell.
-   * Special! Used in fake Cells only. Should never be the timestamp on an actual Cell returned to
-   * a client.
-   * @deprecated Should not be public since hbase-1.3.0. For internal use only. Move internal to
-   *             Scanners flagged as special timestamp value never to be returned as timestamp on a Cell.
-   */
-  @Deprecated
-  public static final long OLDEST_TIMESTAMP = Long.MIN_VALUE;
   /**
    * LATEST_TIMESTAMP in bytes form
    */
@@ -944,14 +933,6 @@ public final class HConstants {
   public static final String HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD =
       "hbase.client.scanner.timeout.period";
-  /**
-   * Use {@link #HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD} instead.
-   * @deprecated This config option is deprecated. Will be removed at later releases after 0.96.
-   */
-  @Deprecated
-  public static final String HBASE_REGIONSERVER_LEASE_PERIOD_KEY =
-      "hbase.regionserver.lease.period";
   /**
    * Default value of {@link #HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD}.
    */
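
The removed constant named the old "hbase.regionserver.lease.period" key, whose javadoc pointed at HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD as the replacement. A minimal sketch of setting the replacement via the client Configuration API (the helper class is illustrative, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    final class ScannerTimeoutConfigSketch {
      static Configuration withScannerTimeout(int millis) {
        Configuration conf = HBaseConfiguration.create();
        // The removed constant pointed at "hbase.regionserver.lease.period";
        // set the scanner timeout key that remains instead.
        conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, millis);
        return conf;
      }
    }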
@@ -1073,42 +1054,7 @@ public final class HConstants {
    */
   public static final float HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f;
-  /**
-   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
-   */
-  @Deprecated
-  public static final Pattern CP_HTD_ATTR_KEY_PATTERN =
-      Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
-  /**
-   * <pre>
-   * Pattern that matches a coprocessor specification. Form is:
-   * {@code <coprocessor jar file location> '|' <class name> ['|' <priority> ['|' <arguments>]]}
-   * where arguments are {@code <KEY> '=' <VALUE> [,...]}
-   * For example: {@code hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
-   * </pre>
-   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
-   */
-  @Deprecated
-  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
-      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
-  /**
-   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
-   */
-  @Deprecated
-  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
-  /**
-   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
-   */
-  @Deprecated
-  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
-  /**
-   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
-   */
-  @Deprecated
-  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
-      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
-      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
   public static final String CP_HTD_ATTR_INCLUSION_KEY =
       "hbase.coprocessor.classloader.included.classes";
@@ -1240,12 +1186,6 @@ public final class HConstants {
   public static final int ADMIN_QOS = 100;
   public static final int HIGH_QOS = 200;
   public static final int SYSTEMTABLE_QOS = HIGH_QOS;
-  /**
-   * @deprecated the name "META_QOS" is a bit ambiguous, actually only meta region transition can
-   *             use this priority, and you should not use this directly. Will be removed in 3.0.0.
-   */
-  @Deprecated
-  public static final int META_QOS = 300;
   /** Directory under /hbase where archived hfiles are stored */
   public static final String HFILE_ARCHIVE_DIRECTORY = "archive";

KeyValue.java

@@ -1214,7 +1214,7 @@ public class KeyValue implements ExtendedCell, Cloneable {
     if (timestamp == HConstants.LATEST_TIMESTAMP) {
       return "LATEST_TIMESTAMP";
     }
-    if (timestamp == HConstants.OLDEST_TIMESTAMP) {
+    if (timestamp == PrivateConstants.OLDEST_TIMESTAMP) {
       return "OLDEST_TIMESTAMP";
     }
     return String.valueOf(timestamp);

KeyValueUtil.java

@@ -284,7 +284,7 @@ public class KeyValueUtil {
       final byte[] family, final int foffset, final int flength, final byte[] qualifier,
       final int qoffset, final int qlength) {
     return new KeyValue(row, roffset, rlength, family, foffset, flength, qualifier, qoffset,
-        qlength, HConstants.OLDEST_TIMESTAMP, Type.Minimum, null, 0, 0);
+        qlength, PrivateConstants.OLDEST_TIMESTAMP, Type.Minimum, null, 0, 0);
   }
   /**

PrivateCellUtil.java

@@ -1765,7 +1765,7 @@ public final class PrivateCellUtil {
     @Override
     public long getTimestamp() {
-      return HConstants.OLDEST_TIMESTAMP;
+      return PrivateConstants.OLDEST_TIMESTAMP;
     }
     @Override
@@ -2000,7 +2000,7 @@ public final class PrivateCellUtil {
     @Override
     public long getTimestamp() {
-      return HConstants.OLDEST_TIMESTAMP;
+      return PrivateConstants.OLDEST_TIMESTAMP;
     }
     @Override
@@ -2760,7 +2760,7 @@ public final class PrivateCellUtil {
     byte type = cell.getTypeByte();
     if (type != KeyValue.Type.Minimum.getCode()) {
       type = KeyValue.Type.values()[KeyValue.Type.codeToType(type).ordinal() - 1].getCode();
-    } else if (ts != HConstants.OLDEST_TIMESTAMP) {
+    } else if (ts != PrivateConstants.OLDEST_TIMESTAMP) {
       ts = ts - 1;
       type = KeyValue.Type.Maximum.getCode();
     } else {

PrivateConstants.java (new file)

@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Constants intended to be used internally.
+ */
+@InterfaceAudience.Private
+public final class PrivateConstants {
+  private PrivateConstants() {
+  }
+
+  /**
+   * Timestamp to use when we want to refer to the oldest cell.
+   * Special! Used in fake Cells only. Should never be the timestamp on an actual Cell returned to
+   * a client.
+   */
+  public static final long OLDEST_TIMESTAMP = Long.MIN_VALUE;
+}
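
The new holder keeps the fake-cell timestamp internal to HBase; a minimal sketch of the kind of caller it serves, mirroring the KeyValueUtil hunk above (the helper class and method are hypothetical, not part of this commit):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.PrivateConstants;

    final class FakeCellSketch {
      // Builds a fake "last on row" key the way KeyValueUtil does above.
      // OLDEST_TIMESTAMP marks it as a fake Cell that must never be
      // returned to a client.
      static KeyValue lastOnRow(byte[] row) {
        return new KeyValue(row, 0, row.length, null, 0, 0, null, 0, 0,
          PrivateConstants.OLDEST_TIMESTAMP, KeyValue.Type.Minimum, null, 0, 0);
      }
    }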

MemStoreCompactorSegmentsIterator.java

@@ -28,6 +28,7 @@ import java.util.NoSuchElementException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -113,7 +114,7 @@ public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator
       scanInfo = store.getScanInfo();
     }
     scanner = new StoreScanner(store, scanInfo, scanners, ScanType.COMPACT_RETAIN_DELETES,
-      store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
+      store.getSmallestReadPoint(), PrivateConstants.OLDEST_TIMESTAMP);
     if (cpHost != null) {
       InternalScanner scannerFromCp = cpHost.preMemStoreCompactionCompact(store, scanner);
       if (scannerFromCp == null) {

StoreFlusher.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputControlUtil;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
@@ -85,7 +86,7 @@ abstract class StoreFlusher {
     }
     final long smallestReadPoint = store.getSmallestReadPoint();
     InternalScanner scanner = new StoreScanner(store, scanInfo, snapshotScanners,
-        ScanType.COMPACT_RETAIN_DELETES, smallestReadPoint, HConstants.OLDEST_TIMESTAMP);
+        ScanType.COMPACT_RETAIN_DELETES, smallestReadPoint, PrivateConstants.OLDEST_TIMESTAMP);
     if (store.getCoprocessorHost() != null) {
       try {

StoreScanner.java

@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.executor.ExecutorService;
@@ -356,7 +357,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
         UserScanQueryMatcher.create(scan, scanInfo, columns, oldestUnexpiredTS, now, null);
     } else {
       this.matcher = CompactionScanQueryMatcher.create(scanInfo, scanType, Long.MAX_VALUE,
-        HConstants.OLDEST_TIMESTAMP, oldestUnexpiredTS, now, null, null, null);
+        PrivateConstants.OLDEST_TIMESTAMP, oldestUnexpiredTS, now, null, null, null);
     }
     seekAllScanner(scanInfo, scanners);
   }
@@ -379,7 +380,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     this(null, maxVersions > 0 ? new Scan().readVersions(maxVersions)
         : SCAN_FOR_COMPACTION, scanInfo, 0, 0L, false, scanType);
     this.matcher = CompactionScanQueryMatcher.create(scanInfo, scanType, Long.MAX_VALUE,
-      HConstants.OLDEST_TIMESTAMP, oldestUnexpiredTS, now, null, null, null);
+      PrivateConstants.OLDEST_TIMESTAMP, oldestUnexpiredTS, now, null, null, null);
     seekAllScanner(scanInfo, scanners);
   }

Compactor.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileInfo;
@@ -196,7 +197,7 @@ public abstract class Compactor<T extends CellSink> {
       if (tmp == null) {
         // There's a file with no information, must be an old one
         // assume we have very old puts
-        fd.earliestPutTs = earliestPutTs = HConstants.OLDEST_TIMESTAMP;
+        fd.earliestPutTs = earliestPutTs = PrivateConstants.OLDEST_TIMESTAMP;
       } else {
         earliestPutTs = Bytes.toLong(tmp);
         fd.earliestPutTs = Math.min(fd.earliestPutTs, earliestPutTs);

ScanQueryMatcher.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.client.Scan;
@@ -191,7 +192,7 @@ public abstract class ScanQueryMatcher implements ShipperListener {
       // check if this is a fake cell. The fake cell is an optimization, we should make the scanner
       // seek to next column or next row. See StoreFileScanner.requestSeek for more details.
       // check for early out based on timestamp alone
-      if (timestamp == HConstants.OLDEST_TIMESTAMP || columns.isDone(timestamp)) {
+      if (timestamp == PrivateConstants.OLDEST_TIMESTAMP || columns.isDone(timestamp)) {
         return columns.getNextRowOrNextColumn(cell);
       }
       // check if the cell is expired by cell TTL
@@ -319,7 +320,7 @@ public abstract class ScanQueryMatcher implements ShipperListener {
    */
   public int compareKeyForNextRow(Cell nextIndexed, Cell currentCell) {
     return PrivateCellUtil.compareKeyBasedOnColHint(rowComparator, nextIndexed, currentCell, 0, 0, null, 0,
-      0, HConstants.OLDEST_TIMESTAMP, Type.Minimum.getCode());
+      0, PrivateConstants.OLDEST_TIMESTAMP, Type.Minimum.getCode());
   }
   /**
@@ -331,7 +332,7 @@ public abstract class ScanQueryMatcher implements ShipperListener {
     ColumnCount nextColumn = columns.getColumnHint();
     if (nextColumn == null) {
       return PrivateCellUtil.compareKeyBasedOnColHint(rowComparator, nextIndexed, currentCell, 0, 0, null,
-        0, 0, HConstants.OLDEST_TIMESTAMP, Type.Minimum.getCode());
+        0, 0, PrivateConstants.OLDEST_TIMESTAMP, Type.Minimum.getCode());
     } else {
       return PrivateCellUtil.compareKeyBasedOnColHint(rowComparator, nextIndexed, currentCell,
         currentCell.getFamilyOffset(), currentCell.getFamilyLength(), nextColumn.getBuffer(),

TestMasterQosFunction.java

@@ -97,7 +97,7 @@ public class TestMasterQosFunction extends QosTestHelper {
         .addTransition(normalTransition).build();
     final String reportFuncName = "ReportRegionStateTransition";
-    checkMethod(conf, reportFuncName, HConstants.META_QOS, qosFunction,
+    checkMethod(conf, reportFuncName, 300, qosFunction,
       metaTransitionRequest);
     checkMethod(conf, reportFuncName, HConstants.HIGH_QOS, qosFunction, normalTransitionRequest);
   }

TestCompactionScanQueryMatcher.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateConstants;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
@@ -82,8 +83,8 @@ public class TestCompactionScanQueryMatcher extends AbstractTestScanQueryMatcher
         HConstants.DEFAULT_BLOCKSIZE, -1L, rowComparator, false);
     CompactionScanQueryMatcher qm = CompactionScanQueryMatcher.create(scanInfo,
-      ScanType.COMPACT_RETAIN_DELETES, Long.MAX_VALUE, HConstants.OLDEST_TIMESTAMP,
-      HConstants.OLDEST_TIMESTAMP, now, from, to, null);
+      ScanType.COMPACT_RETAIN_DELETES, Long.MAX_VALUE, PrivateConstants.OLDEST_TIMESTAMP,
+      PrivateConstants.OLDEST_TIMESTAMP, now, from, to, null);
     List<ScanQueryMatcher.MatchCode> actual = new ArrayList<>(rows.length);
     byte[] prevRow = null;
     for (byte[] row : rows) {

TestUserScanQueryMatcher.java

@@ -53,9 +53,8 @@ public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
   private static final Logger LOG = LoggerFactory.getLogger(TestUserScanQueryMatcher.class);
   /**
-   * This is a cryptic test. It is checking that we don't include a fake cell, one that has a
-   * timestamp of {@link HConstants#OLDEST_TIMESTAMP}. See HBASE-16074 for background.
-   * @throws IOException
+   * This is a cryptic test. It is checking that we don't include a fake cell. See HBASE-16074 for
+   * background.
    */
   @Test
   public void testNeverIncludeFakeCell() throws IOException {