HBASE-18793 Remove deprecated methods in RegionObserver

zhangduo 2017-09-12 20:44:03 +08:00
parent 4b124913f0
commit e6e52cd80f
14 changed files with 253 additions and 881 deletions
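
For third-party coprocessors this is a source-incompatible change: the removed overloads must be replaced by the surviving hook signatures, which now always carry the CompactionRequest (and, for the scanner-open hooks, a read point). A minimal sketch of a migrated observer — the class name is hypothetical, the method shape is taken from the diff below:

// Hypothetical observer compiled against the post-HBASE-18793 RegionObserver API.
import java.io.IOException;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;

public class ExampleCompactionObserver implements RegionObserver {
  @Override
  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
      InternalScanner scanner, ScanType scanType, CompactionRequest request) throws IOException {
    // request is always supplied now; returning the scanner unchanged keeps
    // the default compaction behavior.
    return scanner;
  }
}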

ZooKeeperScanPolicyObserver.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.zookeeper.KeeperException;
@@ -187,8 +188,9 @@ public class ZooKeeperScanPolicyObserver implements RegionObserver
   }

   @Override
-  public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, List<KeyValueScanner> scanners, InternalScanner s) throws IOException {
+  public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, List<KeyValueScanner> scanners, InternalScanner s, long readPoint)
+      throws IOException {
     ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
     if (scanInfo == null) {
       // take default action
@@ -199,10 +201,9 @@ public class ZooKeeperScanPolicyObserver implements RegionObserver
   }

   @Override
-  public InternalScanner preCompactScannerOpen(
-      final ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
-      InternalScanner s) throws IOException {
+  public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
+      InternalScanner s, CompactionRequest request, long readPoint) throws IOException {
     ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
     if (scanInfo == null) {
       // take default action
@@ -213,9 +214,9 @@ public class ZooKeeperScanPolicyObserver implements RegionObserver
   }

   @Override
-  public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
-      final KeyValueScanner s) throws IOException {
+  public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s, long readPoint)
+      throws IOException {
     ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
     if (scanInfo == null) {
       // take default action

RegionCoprocessorHost.java

@@ -19,6 +19,9 @@
 package org.apache.hadoop.hbase.regionserver;

+import com.google.protobuf.Message;
+import com.google.protobuf.Service;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -37,8 +40,8 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -73,17 +76,15 @@ import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.regionserver.Region.Operation;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;

 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;

-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
 /**
  * Implements the coprocessor environment and runtime support for coprocessors
@@ -96,8 +97,8 @@ public class RegionCoprocessorHost
   private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class);
   // The shared data map
-  private static ReferenceMap sharedDataMap =
-      new ReferenceMap(AbstractReferenceMap.ReferenceStrength.HARD,
+  private static final ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP =
+      new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
           AbstractReferenceMap.ReferenceStrength.WEAK);

   // optimization: no need to call postScannerFilterRow, if no coprocessor implements it
@@ -401,14 +402,11 @@ public class RegionCoprocessorHost
     }
     ConcurrentMap<String, Object> classData;
     // make sure only one thread can add maps
-    synchronized (sharedDataMap) {
+    synchronized (SHARED_DATA_MAP) {
       // as long as at least one RegionEnvironment holds on to its classData it will
       // remain in this map
-      classData = (ConcurrentMap<String, Object>)sharedDataMap.get(implClass.getName());
-      if (classData == null) {
-        classData = new ConcurrentHashMap<>();
-        sharedDataMap.put(implClass.getName(), classData);
-      }
+      classData =
+          SHARED_DATA_MAP.computeIfAbsent(implClass.getName(), k -> new ConcurrentHashMap<>());
     }
     return new RegionEnvironment(instance, priority, seq, conf, region,
         rsServices, classData);
@@ -672,136 +670,7 @@ public class RegionCoprocessorHost
     });
   }

-  /**
-   * Invoked just before a split
-   * @throws IOException
-   */
-  @Deprecated
-  public void preSplit(final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.preSplit(ctx);
-      }
-    });
-  }
-
-  /**
-   * Invoked just before a split
-   * @throws IOException
-   *
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public void preSplit(final byte[] splitRow, final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.preSplit(ctx, splitRow);
-      }
-    });
-  }
-
-  /**
-   * Invoked just after a split
-   * @param l the new left-hand daughter region
-   * @param r the new right-hand daughter region
-   * @throws IOException
-   *
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public void postSplit(final Region l, final Region r, final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.postSplit(ctx, l, r);
-      }
-    });
-  }
-
-  /**
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public boolean preSplitBeforePONR(final byte[] splitKey,
-      final List<Mutation> metaEntries, final User user) throws IOException {
-    return execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.preSplitBeforePONR(ctx, splitKey, metaEntries);
-      }
-    });
-  }
-
-  /**
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public void preSplitAfterPONR(final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.preSplitAfterPONR(ctx);
-      }
-    });
-  }
-
-  /**
-   * Invoked just before the rollback of a failed split is started
-   * @throws IOException
-   *
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public void preRollBackSplit(final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.preRollBackSplit(ctx);
-      }
-    });
-  }
-
-  /**
-   * Invoked just after the rollback of a failed split is done
-   * @throws IOException
-   *
-   * Note: the logic moves to Master; it is unused in RS
-   */
-  @Deprecated
-  public void postRollBackSplit(final User user) throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation(user) {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.postRollBackSplit(ctx);
-      }
-    });
-  }
-
-  /**
-   * Invoked after a split is completed irrespective of a failure or success.
-   * @throws IOException
-   */
-  public void postCompleteSplit() throws IOException {
-    execOperation(coprocessors.isEmpty() ? null : new RegionOperation() {
-      @Override
-      public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
-          throws IOException {
-        oserver.postCompleteSplit(ctx);
-      }
-    });
-  }
-
   // RegionObserver support
   /**
    * @param get the Get request
    * @return true if default processing should be bypassed
@@ -1272,8 +1141,7 @@ public class RegionCoprocessorHost
   /**
    * See
-   * {@link RegionObserver#preStoreScannerOpen(ObserverContext,
-   * Store, Scan, NavigableSet, KeyValueScanner)}
+   * {@link RegionObserver#preStoreScannerOpen(ObserverContext, Store, Scan, NavigableSet, KeyValueScanner, long)}
    */
   public KeyValueScanner preStoreScannerOpen(final Store store, final Scan scan,
       final NavigableSet<byte[]> targetCols, final long readPt) throws IOException {
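
Aside from the hook removals, the RegionCoprocessorHost change above swaps the hand-rolled get/null-check/put on the shared data map for Map.computeIfAbsent; the surrounding synchronized block is kept because ReferenceMap itself is not a concurrent map. A standalone sketch of the same pattern (names are illustrative):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ComputeIfAbsentSketch {
  public static void main(String[] args) {
    ConcurrentMap<String, ConcurrentMap<String, Object>> shared = new ConcurrentHashMap<>();
    // computeIfAbsent creates the per-class map only when the key is absent
    // and returns the existing one otherwise, collapsing get + null-check + put.
    ConcurrentMap<String, Object> classData =
        shared.computeIfAbsent("com.example.MyObserver", k -> new ConcurrentHashMap<>());
    classData.put("invocations", 0);
    System.out.println(shared); // {com.example.MyObserver={invocations=0}}
  }
}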

AccessController.java

@@ -92,6 +92,7 @@ import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
@@ -1530,9 +1531,8 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
   }

   @Override
-  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> c,
-      final Store store, final InternalScanner scanner, final ScanType scanType)
-      throws IOException {
+  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+      InternalScanner scanner, ScanType scanType, CompactionRequest request) throws IOException {
     requirePermission(getActiveUser(c), "compact", getTableName(c.getEnvironment()), null, null,
         Action.ADMIN, Action.CREATE);
     return scanner;
@@ -1895,30 +1895,6 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
     return result;
   }

-  @Override
-  public long preIncrementColumnValue(final ObserverContext<RegionCoprocessorEnvironment> c,
-      final byte [] row, final byte [] family, final byte [] qualifier,
-      final long amount, final boolean writeToWAL)
-      throws IOException {
-    // Require WRITE permission to the table, CF, and the KV to be replaced by the
-    // incremented value
-    RegionCoprocessorEnvironment env = c.getEnvironment();
-    Map<byte[],? extends Collection<byte[]>> families = makeFamilyMap(family, qualifier);
-    User user = getActiveUser(c);
-    AuthResult authResult = permissionGranted(OpType.INCREMENT_COLUMN_VALUE, user, env, families,
-        Action.WRITE);
-    if (!authResult.isAllowed() && cellFeaturesEnabled && !compatibleEarlyTermination) {
-      authResult.setAllowed(checkCoveringPermission(user, OpType.INCREMENT_COLUMN_VALUE, env, row,
-          families, HConstants.LATEST_TIMESTAMP, Action.WRITE));
-      authResult.setReason("Covering cell set");
-    }
-    logResult(authResult);
-    if (authorizationEnabled && !authResult.isAllowed()) {
-      throw new AccessDeniedException("Insufficient permissions " + authResult.toContextString());
-    }
-    return -1;
-  }
-
   @Override
   public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
       throws IOException {

TestAvoidCellReferencesIntoShippedBlocks.java

@@ -255,15 +255,8 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
   public static class CompactorRegionObserver implements RegionObserver {

     @Override
     public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
-        Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
-        long earliestPutTs, InternalScanner s) throws IOException {
-      return createCompactorScanner(store, scanners, scanType, earliestPutTs);
-    }
-
-    @Override
-    public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
-        Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
-        long earliestPutTs, InternalScanner s, CompactionRequest request) throws IOException {
+        Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
+        InternalScanner s, CompactionRequest request, long readPoint) throws IOException {
       return createCompactorScanner(store, scanners, scanType, earliestPutTs);
     }

SimpleRegionObserver.java

@@ -19,6 +19,18 @@
 package org.apache.hadoop.hbase.coprocessor;

+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
@@ -43,30 +55,19 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.Leases;
 import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Region.Operation;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
-import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;

 /**
  * A sample region observer that tests the RegionObserver interface.
@@ -82,8 +83,6 @@ public class SimpleRegionObserver implements RegionObserver {
   final AtomicInteger ctPreFlush = new AtomicInteger(0);
   final AtomicInteger ctPreFlushScannerOpen = new AtomicInteger(0);
   final AtomicInteger ctPostFlush = new AtomicInteger(0);
-  final AtomicInteger ctPreSplit = new AtomicInteger(0);
-  final AtomicInteger ctPostSplit = new AtomicInteger(0);
   final AtomicInteger ctPreCompactSelect = new AtomicInteger(0);
   final AtomicInteger ctPostCompactSelect = new AtomicInteger(0);
   final AtomicInteger ctPreCompactScanner = new AtomicInteger(0);
@@ -124,8 +123,6 @@ public class SimpleRegionObserver implements RegionObserver {
   final AtomicInteger ctPostReplayWALs = new AtomicInteger(0);
   final AtomicInteger ctPreWALRestore = new AtomicInteger(0);
   final AtomicInteger ctPostWALRestore = new AtomicInteger(0);
-  final AtomicInteger ctPreSplitBeforePONR = new AtomicInteger(0);
-  final AtomicInteger ctPreSplitAfterPONR = new AtomicInteger(0);
   final AtomicInteger ctPreStoreFileReaderOpen = new AtomicInteger(0);
   final AtomicInteger ctPostStoreFileReaderOpen = new AtomicInteger(0);
   final AtomicInteger ctPostBatchMutateIndispensably = new AtomicInteger(0);
@@ -184,10 +181,11 @@ public class SimpleRegionObserver implements RegionObserver {
   }

   @Override
-  public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, List<KeyValueScanner> scanners, InternalScanner s) throws IOException {
+  public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, List<KeyValueScanner> scanners, InternalScanner s, long readPoint)
+      throws IOException {
     ctPreFlushScannerOpen.incrementAndGet();
-    return null;
+    return s;
   }
@@ -204,63 +202,36 @@ public class SimpleRegionObserver implements RegionObserver {
   }

   @Override
-  public void preSplit(ObserverContext<RegionCoprocessorEnvironment> c) {
-    ctPreSplit.incrementAndGet();
-  }
-
-  @Override
-  public void preSplitBeforePONR(
-      ObserverContext<RegionCoprocessorEnvironment> ctx, byte[] splitKey,
-      List<Mutation> metaEntries) throws IOException {
-    ctPreSplitBeforePONR.incrementAndGet();
-  }
-
-  @Override
-  public void preSplitAfterPONR(
-      ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
-    ctPreSplitAfterPONR.incrementAndGet();
-  }
-
-  @Override
-  public void postSplit(ObserverContext<RegionCoprocessorEnvironment> c, Region l, Region r) {
-    ctPostSplit.incrementAndGet();
-  }
-
-  public boolean wasSplit() {
-    return ctPreSplit.get() > 0 && ctPostSplit.get() > 0;
-  }
-
-  @Override
-  public void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, List<StoreFile> candidates) {
+  public void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+      List<StoreFile> candidates, CompactionRequest request) throws IOException {
     ctPreCompactSelect.incrementAndGet();
   }

   @Override
-  public void postCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, ImmutableList<StoreFile> selected) {
+  public void postCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+      ImmutableList<StoreFile> selected, CompactionRequest request) {
     ctPostCompactSelect.incrementAndGet();
   }

   @Override
-  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-      Store store, InternalScanner scanner, ScanType scanType) {
+  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+      InternalScanner scanner, ScanType scanType, CompactionRequest request) throws IOException {
     ctPreCompact.incrementAndGet();
     return scanner;
   }

   @Override
-  public InternalScanner preCompactScannerOpen(
-      final ObserverContext<RegionCoprocessorEnvironment> c,
+  public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
       Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
-      InternalScanner s) throws IOException {
+      InternalScanner s, CompactionRequest request, long readPoint) throws IOException {
     ctPreCompactScanner.incrementAndGet();
-    return null;
+    return s;
   }

   @Override
-  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-      Store store, StoreFile resultFile) {
+  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+      StoreFile resultFile, CompactionRequest request) throws IOException {
     ctPostCompact.incrementAndGet();
   }
@@ -277,11 +248,11 @@ public class SimpleRegionObserver implements RegionObserver {
   }

   @Override
-  public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
-      final KeyValueScanner s) throws IOException {
+  public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s, long readPt)
+      throws IOException {
     ctPreStoreScannerOpen.incrementAndGet();
-    return null;
+    return s;
   }
@@ -864,22 +835,6 @@ public class SimpleRegionObserver implements RegionObserver {
     return ctPostFlush.get();
   }

-  public int getCtPreSplit() {
-    return ctPreSplit.get();
-  }
-
-  public int getCtPreSplitBeforePONR() {
-    return ctPreSplitBeforePONR.get();
-  }
-
-  public int getCtPreSplitAfterPONR() {
-    return ctPreSplitAfterPONR.get();
-  }
-
-  public int getCtPostSplit() {
-    return ctPostSplit.get();
-  }
-
   public int getCtPreCompactSelect() {
     return ctPreCompactSelect.get();
   }
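
The SimpleRegionObserver hunks above keep the file's counting pattern intact: every hook bumps an AtomicInteger that tests read back through a getter. A condensed sketch of that pattern against the new postCompact signature (class name is illustrative):

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;

public class CountingObserver implements RegionObserver {
  private final AtomicInteger ctPostCompact = new AtomicInteger(0);

  @Override
  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
      StoreFile resultFile, CompactionRequest request) throws IOException {
    // Count invocations so a test can assert the hook actually fired.
    ctPostCompact.incrementAndGet();
  }

  public int getCtPostCompact() {
    return ctPostCompact.get();
  }
}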

TestCoprocessorInterface.java

@@ -61,6 +61,7 @@ import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Rule;
@@ -193,13 +194,13 @@ public class TestCoprocessorInterface {
     }

     @Override
     public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-        Store store, InternalScanner scanner, ScanType scanType) {
+        Store store, InternalScanner scanner, ScanType scanType, CompactionRequest request) {
       preCompactCalled = true;
       return scanner;
     }

     @Override
     public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-        Store store, StoreFile resultFile) {
+        Store store, StoreFile resultFile, CompactionRequest request) {
       postCompactCalled = true;
     }

     @Override

TestRegionObserverInterface.java

@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
@@ -416,7 +417,7 @@ public class TestRegionObserverInterface {
     @Override
     public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
-        final InternalScanner scanner, final ScanType scanType) {
+        final InternalScanner scanner, final ScanType scanType, CompactionRequest request) {
       return new InternalScanner() {
         @Override
         public boolean next(List<Cell> results) throws IOException {
@@ -455,7 +456,7 @@ public class TestRegionObserverInterface {
     @Override
     public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
-        StoreFile resultFile) {
+        StoreFile resultFile, CompactionRequest request) {
       lastCompaction = EnvironmentEdgeManager.currentTime();
     }

TestRegionObserverScannerOpenHook.java

@@ -61,6 +61,7 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
@@ -139,7 +140,8 @@ public class TestRegionObserverScannerOpenHook {
     @Override
     public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
-        Store store, List<KeyValueScanner> scanners, InternalScanner s) throws IOException {
+        Store store, List<KeyValueScanner> scanners, InternalScanner s, long readPoint)
+        throws IOException {
       scanners.forEach(KeyValueScanner::close);
       return NO_DATA;
     }
@@ -153,7 +155,8 @@ public class TestRegionObserverScannerOpenHook {
     @Override
     public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
         Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
-        long earliestPutTs, InternalScanner s) throws IOException {
+        long earliestPutTs, InternalScanner s, CompactionRequest request, long readPoint)
+        throws IOException {
       scanners.forEach(KeyValueScanner::close);
       return NO_DATA;
     }

TestNamespaceAuditor.java

@@ -74,6 +74,7 @@ import org.apache.hadoop.hbase.quotas.QuotaUtil;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -459,8 +460,8 @@ public class TestNamespaceAuditor {
     volatile CountDownLatch postCompact;

     @Override
-    public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-        Store store, StoreFile resultFile) throws IOException {
+    public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
+        StoreFile resultFile, CompactionRequest request) throws IOException {
       postCompact.countDown();
     }

NoOpScanPolicyObserver.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.client.TestFromClientSideWithCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;

 /**
  * RegionObserver that just reimplements the default behavior,
@@ -44,7 +45,8 @@ public class NoOpScanPolicyObserver implements RegionObserver {
    */
   @Override
   public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, List<KeyValueScanner> scanners, InternalScanner s) throws IOException {
+      Store store, List<KeyValueScanner> scanners, InternalScanner s, long readPoint)
+      throws IOException {
     ScanInfo oldSI = store.getScanInfo();
     ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), store.getColumnFamilyDescriptor(),
         oldSI.getTtl(), oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
@@ -59,7 +61,7 @@ public class NoOpScanPolicyObserver implements RegionObserver {
   public InternalScanner preCompactScannerOpen(
       final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
       List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
-      InternalScanner s) throws IOException {
+      InternalScanner s, CompactionRequest request, long readPoint) throws IOException {
     // this demonstrates how to override the scanners default behavior
     ScanInfo oldSI = store.getScanInfo();
     ScanInfo scanInfo = new ScanInfo(oldSI.getConfiguration(), store.getColumnFamilyDescriptor(),
@@ -69,8 +71,8 @@ public class NoOpScanPolicyObserver implements RegionObserver {
   }

   @Override
-  public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
-      Store store, final Scan scan, final NavigableSet<byte[]> targetCols, KeyValueScanner s)
+  public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+      Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s, long readPoint)
       throws IOException {
     Region r = c.getEnvironment().getRegion();
     return scan.isReversed() ? new ReversedStoreScanner(store,

TestHRegionServerBulkLoad.java

@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.wal.TestWALActionsListener;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
@@ -252,9 +253,8 @@ public class TestHRegionServerBulkLoad {
   public static class MyObserver implements RegionObserver {
     static int sleepDuration;

     @Override
-    public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
-        final Store store, final InternalScanner scanner, final ScanType scanType)
-        throws IOException {
+    public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
+        InternalScanner scanner, ScanType scanType, CompactionRequest request) throws IOException {
       try {
         Thread.sleep(sleepDuration);
       } catch (InterruptedException ie) {

TestAccessController.java

@@ -902,7 +902,7 @@ public class TestAccessController extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCompact(ObserverContext.createAndPrepare(RCP_ENV, null), null, null,
-            ScanType.COMPACT_RETAIN_DELETES);
+            ScanType.COMPACT_RETAIN_DELETES, null);
         return null;
       }
     };

TestCoprocessorScanPolicy.java

@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -239,8 +240,8 @@ public class TestCoprocessorScanPolicy {
     @Override
     public InternalScanner preFlushScannerOpen(
-        final ObserverContext<RegionCoprocessorEnvironment> c,
-        Store store, List<KeyValueScanner> scanners, InternalScanner s) throws IOException {
+        final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
+        List<KeyValueScanner> scanners, InternalScanner s, long readPoint) throws IOException {
       Long newTtl = ttls.get(store.getTableName());
       if (newTtl != null) {
         System.out.println("PreFlush:" + newTtl);
@@ -262,7 +263,7 @@ public class TestCoprocessorScanPolicy {
     public InternalScanner preCompactScannerOpen(
         final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
         List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
-        InternalScanner s) throws IOException {
+        InternalScanner s, CompactionRequest request, long readPoint) throws IOException {
       Long newTtl = ttls.get(store.getTableName());
       Integer newVersions = versions.get(store.getTableName());
       ScanInfo oldSI = store.getScanInfo();