HBASE-18925 Update mockito dependency from mockito-all:1.10.19 to mockito-core:2.1.0 for JDK8 support.
Last mockito-all release was in Dec'14. Mockito-core has had many releases since then. From mockito's site: - "Mockito does not produce the mockito-all artifact anymore ; this one was primarily aimed at ant users, and contained other dependencies. We felt it was time to move on and remove such artifacts as they cause problems in dependency management system like maven or gradle." - anyX() and any(SomeType.class) matchers now reject nulls and check type.
This commit is contained in:
parent
d407e37cf4
commit
d69570a485
|
@ -192,7 +192,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -138,13 +138,12 @@ public class TestClientScanner {
|
|||
RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
|
||||
|
||||
Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(),
|
||||
Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
|
||||
private int count = 0;
|
||||
@Override
|
||||
public Result[] answer(InvocationOnMock invocation) throws Throwable {
|
||||
ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
|
||||
ScannerCallableWithReplicas.class);
|
||||
ScannerCallableWithReplicas callable = invocation.getArgument(0);
|
||||
switch (count) {
|
||||
case 0: // initialize
|
||||
count++;
|
||||
|
@ -176,7 +175,7 @@ public class TestClientScanner {
|
|||
// One for fetching the results
|
||||
// One for fetching empty results and quit as we do not have moreResults hint.
|
||||
inOrder.verify(caller, Mockito.times(2)).callWithoutRetries(
|
||||
Mockito.any(RetryingCallable.class), Mockito.anyInt());
|
||||
Mockito.any(), Mockito.anyInt());
|
||||
|
||||
assertEquals(1, scanner.cache.size());
|
||||
Result r = scanner.cache.poll();
|
||||
|
@ -199,13 +198,12 @@ public class TestClientScanner {
|
|||
RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
|
||||
|
||||
Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(),
|
||||
Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
|
||||
private int count = 0;
|
||||
@Override
|
||||
public Result[] answer(InvocationOnMock invocation) throws Throwable {
|
||||
ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
|
||||
ScannerCallableWithReplicas.class);
|
||||
ScannerCallableWithReplicas callable = invocation.getArgument(0);
|
||||
switch (count) {
|
||||
case 0: // initialize
|
||||
count++;
|
||||
|
@ -235,7 +233,7 @@ public class TestClientScanner {
|
|||
scanner.loadCache();
|
||||
|
||||
inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
|
||||
Mockito.any(RetryingCallable.class), Mockito.anyInt());
|
||||
Mockito.any(), Mockito.anyInt());
|
||||
|
||||
assertEquals(1, scanner.cache.size());
|
||||
Result r = scanner.cache.poll();
|
||||
|
@ -260,13 +258,12 @@ public class TestClientScanner {
|
|||
RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
|
||||
|
||||
Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(),
|
||||
Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
|
||||
private int count = 0;
|
||||
@Override
|
||||
public Result[] answer(InvocationOnMock invocation) throws Throwable {
|
||||
ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
|
||||
ScannerCallableWithReplicas.class);
|
||||
ScannerCallableWithReplicas callable = invocation.getArgument(0);
|
||||
switch (count) {
|
||||
case 0: // initialize
|
||||
count++;
|
||||
|
@ -296,7 +293,7 @@ public class TestClientScanner {
|
|||
scanner.loadCache();
|
||||
|
||||
inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
|
||||
Mockito.any(RetryingCallable.class), Mockito.anyInt());
|
||||
Mockito.any(), Mockito.anyInt());
|
||||
|
||||
assertEquals(3, scanner.cache.size());
|
||||
Result r = scanner.cache.poll();
|
||||
|
@ -333,13 +330,12 @@ public class TestClientScanner {
|
|||
RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
|
||||
|
||||
Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(),
|
||||
Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
|
||||
private int count = 0;
|
||||
@Override
|
||||
public Result[] answer(InvocationOnMock invocation) throws Throwable {
|
||||
ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
|
||||
ScannerCallableWithReplicas.class);
|
||||
ScannerCallableWithReplicas callable = invocation.getArgument(0);
|
||||
switch (count) {
|
||||
case 0: // initialize
|
||||
count++;
|
||||
|
@ -369,7 +365,7 @@ public class TestClientScanner {
|
|||
scanner.loadCache();
|
||||
|
||||
inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(
|
||||
Mockito.any(RetryingCallable.class), Mockito.anyInt());
|
||||
Mockito.any(), Mockito.anyInt());
|
||||
|
||||
assertEquals(1, scanner.cache.size());
|
||||
Result r = scanner.cache.poll();
|
||||
|
@ -398,13 +394,12 @@ public class TestClientScanner {
|
|||
RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
|
||||
|
||||
Mockito.when(rpcFactory.<Result[]> newCaller()).thenReturn(caller);
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class),
|
||||
Mockito.when(caller.callWithoutRetries(Mockito.any(),
|
||||
Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {
|
||||
private int count = 0;
|
||||
@Override
|
||||
public Result[] answer(InvocationOnMock invocation) throws Throwable {
|
||||
ScannerCallableWithReplicas callable = invocation.getArgumentAt(0,
|
||||
ScannerCallableWithReplicas.class);
|
||||
ScannerCallableWithReplicas callable = invocation.getArgument(0);
|
||||
switch (count) {
|
||||
case 0: // initialize
|
||||
count++;
|
||||
|
@ -436,7 +431,7 @@ public class TestClientScanner {
|
|||
scanner.loadCache();
|
||||
|
||||
inOrder.verify(caller, Mockito.times(2)).callWithoutRetries(
|
||||
Mockito.any(RetryingCallable.class), Mockito.anyInt());
|
||||
Mockito.any(), Mockito.anyInt());
|
||||
|
||||
assertEquals(2, scanner.cache.size());
|
||||
Result r = scanner.cache.poll();
|
||||
|
|
|
@ -44,7 +44,7 @@ public class TestHTableMultiplexerViaMocks {
|
|||
mockConnection = mock(ClusterConnection.class);
|
||||
|
||||
// Call the real put(TableName, Put, int) method
|
||||
when(mockMultiplexer.put(any(TableName.class), any(Put.class), anyInt())).thenCallRealMethod();
|
||||
when(mockMultiplexer.put(any(TableName.class), any(), anyInt())).thenCallRealMethod();
|
||||
|
||||
// Return the mocked ClusterConnection
|
||||
when(mockMultiplexer.getConnection()).thenReturn(mockConnection);
|
||||
|
|
|
@ -30,9 +30,7 @@ import org.apache.hadoop.hbase.HConstants;
|
|||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
|
||||
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
|
||||
import org.apache.hadoop.hbase.testclassification.ClientTests;
|
||||
import org.apache.hadoop.hbase.testclassification.SmallTests;
|
||||
|
@ -103,14 +101,14 @@ public class TestSnapshotFromAdmin {
|
|||
Mockito
|
||||
.when(
|
||||
mockMaster.snapshot((RpcController) Mockito.any(),
|
||||
Mockito.any(SnapshotRequest.class))).thenReturn(response);
|
||||
Mockito.any())).thenReturn(response);
|
||||
// setup the response
|
||||
IsSnapshotDoneResponse.Builder builder = IsSnapshotDoneResponse.newBuilder();
|
||||
builder.setDone(false);
|
||||
// first five times, we return false, last we get success
|
||||
Mockito.when(
|
||||
mockMaster.isSnapshotDone((RpcController) Mockito.any(),
|
||||
Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(builder.build(), builder.build(),
|
||||
Mockito.any())).thenReturn(builder.build(), builder.build(),
|
||||
builder.build(), builder.build(), builder.build(), builder.setDone(true).build());
|
||||
|
||||
// setup the admin and run the test
|
||||
|
@ -162,12 +160,12 @@ public class TestSnapshotFromAdmin {
|
|||
Mockito.when(mockConnection.getKeepAliveMasterService()).thenReturn(master);
|
||||
SnapshotResponse response = SnapshotResponse.newBuilder().setExpectedTimeout(0).build();
|
||||
Mockito.when(
|
||||
master.snapshot((RpcController) Mockito.any(), Mockito.any(SnapshotRequest.class)))
|
||||
master.snapshot((RpcController) Mockito.any(), Mockito.any()))
|
||||
.thenReturn(response);
|
||||
IsSnapshotDoneResponse doneResponse = IsSnapshotDoneResponse.newBuilder().setDone(true).build();
|
||||
Mockito.when(
|
||||
master.isSnapshotDone((RpcController) Mockito.any(),
|
||||
Mockito.any(IsSnapshotDoneRequest.class))).thenReturn(doneResponse);
|
||||
Mockito.any())).thenReturn(doneResponse);
|
||||
|
||||
// make sure that we can use valid names
|
||||
admin.snapshot(new SnapshotDescription("snapshot", TableName.valueOf(name.getMethodName())));
|
||||
|
|
|
@ -113,8 +113,8 @@ public class TestHBaseSaslRpcClient {
|
|||
final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
|
||||
saslClCallbackHandler.handle(callbackArray);
|
||||
verify(nameCallback).setName(anyString());
|
||||
verify(realmCallback).setText(anyString());
|
||||
verify(passwordCallback).setPassword(any(char[].class));
|
||||
verify(realmCallback).setText(any());
|
||||
verify(passwordCallback).setPassword(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -269,7 +269,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -213,7 +213,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- Some tests rely on Hadoop's KeyStoreTestUtil, which needs bc. -->
|
||||
|
|
|
@ -206,7 +206,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
|
|
@ -63,13 +63,13 @@ public class TestMapReduceExamples {
|
|||
|
||||
@Override
|
||||
public Void answer(InvocationOnMock invocation) throws Throwable {
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
|
||||
Put put = (Put) invocation.getArguments()[1];
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
|
||||
Put put = (Put) invocation.getArgument(1);
|
||||
assertEquals("row", Bytes.toString(writer.get()));
|
||||
assertEquals("row", Bytes.toString(put.getRow()));
|
||||
return null;
|
||||
}
|
||||
}).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
|
||||
}).when(ctx).write(any(), any());
|
||||
|
||||
uploader.map(null, new Text("row,family,qualifier,value"), ctx);
|
||||
|
||||
|
@ -134,13 +134,13 @@ public class TestMapReduceExamples {
|
|||
|
||||
@Override
|
||||
public Void answer(InvocationOnMock invocation) throws Throwable {
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
|
||||
Put put = (Put) invocation.getArguments()[1];
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArgument(0);
|
||||
Put put = (Put) invocation.getArgument(1);
|
||||
assertEquals("tableName-column1", Bytes.toString(writer.get()));
|
||||
assertEquals("test", Bytes.toString(put.getRow()));
|
||||
return null;
|
||||
}
|
||||
}).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
|
||||
}).when(ctx).write(any(), any());
|
||||
Result result = mock(Result.class);
|
||||
when(result.getValue(Bytes.toBytes("columnFamily"), Bytes.toBytes("column1"))).thenReturn(
|
||||
Bytes.toBytes("test"));
|
||||
|
|
|
@ -311,7 +311,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.http;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
@ -32,6 +33,7 @@ import java.util.Enumeration;
|
|||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.servlet.Filter;
|
||||
import javax.servlet.FilterChain;
|
||||
|
@ -48,6 +50,8 @@ import javax.servlet.http.HttpServletResponse;
|
|||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.HadoopIllegalArgumentException;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.yetus.audience.InterfaceStability;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
@ -90,9 +94,6 @@ import org.eclipse.jetty.webapp.WebAppContext;
|
|||
import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.glassfish.jersey.servlet.ServletContainer;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
|
||||
/**
|
||||
* Create a Jetty embedded server to answer http requests. The primary goal
|
||||
* is to serve up status information for the server.
|
||||
|
@ -161,6 +162,11 @@ public class HttpServer implements FilterContainer {
|
|||
|
||||
private final List<ListenerInfo> listeners = Lists.newArrayList();
|
||||
|
||||
@VisibleForTesting
|
||||
public List<ServerConnector> getServerConnectors() {
|
||||
return listeners.stream().map(info -> info.listener).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
protected final WebAppContext webAppContext;
|
||||
protected final boolean findPort;
|
||||
protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
|
||||
|
@ -1011,6 +1017,7 @@ public class HttpServer implements FilterContainer {
|
|||
* Open the main listener for the server
|
||||
* @throws Exception
|
||||
*/
|
||||
@VisibleForTesting
|
||||
void openListeners() throws Exception {
|
||||
for (ListenerInfo li : listeners) {
|
||||
ServerConnector listener = li.listener;
|
||||
|
|
|
@ -67,7 +67,6 @@ import org.junit.Ignore;
|
|||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.internal.util.reflection.Whitebox;
|
||||
|
||||
@Category({MiscTests.class, SmallTests.class})
|
||||
public class TestHttpServer extends HttpServerFunctionalTest {
|
||||
|
@ -557,10 +556,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
|
|||
HttpServer server = createServer(host, port);
|
||||
try {
|
||||
// not bound, ephemeral should return requested port (0 for ephemeral)
|
||||
List<?> listeners = (List<?>) Whitebox.getInternalState(server,
|
||||
"listeners");
|
||||
ServerConnector listener = (ServerConnector) Whitebox.getInternalState(
|
||||
listeners.get(0), "listener");
|
||||
ServerConnector listener = server.getServerConnectors().get(0);
|
||||
|
||||
assertEquals(port, listener.getPort());
|
||||
// verify hostname is what was given
|
||||
|
|
|
@ -262,7 +262,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -36,6 +36,6 @@ public class TestDriver {
|
|||
ProgramDriver programDriverMock = mock(ProgramDriver.class);
|
||||
Driver.setProgramDriver(programDriverMock);
|
||||
Driver.main(new String[]{});
|
||||
verify(programDriverMock).driver(Mockito.any(String[].class));
|
||||
verify(programDriverMock).driver(Mockito.any());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -107,7 +107,7 @@ public class TestGroupingTableMap {
|
|||
gTableMap.map(null, result, outputCollectorMock, reporter);
|
||||
verify(result).listCells();
|
||||
verify(outputCollectorMock, times(1))
|
||||
.collect(any(ImmutableBytesWritable.class), any(Result.class));
|
||||
.collect(any(), any());
|
||||
verifyNoMoreInteractions(outputCollectorMock);
|
||||
} finally {
|
||||
if (gTableMap != null)
|
||||
|
|
|
@ -55,7 +55,7 @@ public class TestIdentityTableMap {
|
|||
reporterMock);
|
||||
|
||||
verify(outputCollectorMock, times(recordNumber)).collect(
|
||||
Mockito.any(ImmutableBytesWritable.class), Mockito.any(Result.class));
|
||||
Mockito.any(), Mockito.any());
|
||||
} finally {
|
||||
if (identityTableMap != null)
|
||||
identityTableMap.close();
|
||||
|
|
|
@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.mapred;
|
|||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.anyLong;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.anyInt;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
||||
|
@ -89,7 +89,7 @@ public class TestRowCounter {
|
|||
mock(OutputCollector.class), reporter);
|
||||
|
||||
Mockito.verify(reporter, times(iterationNumber)).incrCounter(
|
||||
any(Enum.class), anyInt());
|
||||
any(), anyLong());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -50,7 +50,7 @@ public class TestGroupingTableMapper {
|
|||
@SuppressWarnings("unchecked")
|
||||
Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Result>.Context context =
|
||||
mock(Mapper.Context.class);
|
||||
context.write(any(ImmutableBytesWritable.class), any(Result.class));
|
||||
context.write(any(), any());
|
||||
List<Cell> keyValue = new ArrayList<>();
|
||||
byte[] row = {};
|
||||
keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"), Bytes
|
||||
|
|
|
@ -675,13 +675,13 @@ public class TestImportExport {
|
|||
|
||||
@Override
|
||||
public Void answer(InvocationOnMock invocation) throws Throwable {
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
|
||||
KeyValue key = (KeyValue) invocation.getArguments()[1];
|
||||
ImmutableBytesWritable writer = invocation.getArgument(0);
|
||||
KeyValue key = invocation.getArgument(1);
|
||||
assertEquals("Key", Bytes.toString(writer.get()));
|
||||
assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
|
||||
return null;
|
||||
}
|
||||
}).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));
|
||||
}).when(ctx).write(any(), any());
|
||||
|
||||
importer.setup(ctx);
|
||||
Result value = mock(Result.class);
|
||||
|
|
|
@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
|
|||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.client.Scan;
|
||||
import org.apache.hadoop.hbase.testclassification.SmallTests;
|
||||
|
@ -68,8 +67,8 @@ public class TestMultiTableSnapshotInputFormatImpl {
|
|||
// probably be the more "pure"
|
||||
// way of doing things. This is the lesser of two evils, perhaps?
|
||||
doNothing().when(this.subject).
|
||||
restoreSnapshot(any(Configuration.class), any(String.class), any(Path.class),
|
||||
any(Path.class), any(FileSystem.class));
|
||||
restoreSnapshot(any(), any(), any(),
|
||||
any(), any());
|
||||
|
||||
this.conf = new Configuration();
|
||||
this.rootDir = new Path("file:///test-root-dir");
|
||||
|
@ -180,7 +179,7 @@ public class TestMultiTableSnapshotInputFormatImpl {
|
|||
|
||||
for (Map.Entry<String, Path> entry : snapshotDirs.entrySet()) {
|
||||
verify(this.subject).restoreSnapshot(eq(this.conf), eq(entry.getKey()), eq(this.rootDir),
|
||||
eq(entry.getValue()), any(FileSystem.class));
|
||||
eq(entry.getValue()), any());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -184,13 +184,13 @@ public class TestWALPlayer {
|
|||
|
||||
@Override
|
||||
public Void answer(InvocationOnMock invocation) throws Throwable {
|
||||
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
|
||||
KeyValue key = (KeyValue) invocation.getArguments()[1];
|
||||
ImmutableBytesWritable writer = invocation.getArgument(0);
|
||||
KeyValue key = invocation.getArgument(1);
|
||||
assertEquals("row", Bytes.toString(writer.get()));
|
||||
assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
|
||||
return null;
|
||||
}
|
||||
}).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class));
|
||||
}).when(context).write(any(), any());
|
||||
|
||||
mapper.map(key, value, context);
|
||||
|
||||
|
|
|
@ -98,7 +98,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -111,7 +111,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
|
|
@ -331,7 +331,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -61,8 +61,8 @@ public class TestRemoteAdminRetries {
|
|||
Response response = new Response(509);
|
||||
when(client.get(anyString(), anyString())).thenReturn(response);
|
||||
when(client.delete(anyString())).thenReturn(response);
|
||||
when(client.put(anyString(), anyString(), any(byte[].class))).thenReturn(response);
|
||||
when(client.post(anyString(), anyString(), any(byte[].class))).thenReturn(response);
|
||||
when(client.put(anyString(), anyString(), any())).thenReturn(response);
|
||||
when(client.post(anyString(), anyString(), any())).thenReturn(response);
|
||||
Configuration configuration = TEST_UTIL.getConfiguration();
|
||||
|
||||
configuration.setInt("hbase.rest.client.max.retries", RETRIES);
|
||||
|
@ -120,7 +120,7 @@ public class TestRemoteAdminRetries {
|
|||
remoteAdmin.createTable(new HTableDescriptor(TableName.valueOf("TestTable")));
|
||||
}
|
||||
});
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -70,9 +70,9 @@ public class TestRemoteHTableRetries {
|
|||
Response response = new Response(509);
|
||||
when(client.get(anyString(), anyString())).thenReturn(response);
|
||||
when(client.delete(anyString())).thenReturn(response);
|
||||
when(client.put(anyString(), anyString(), any(byte[].class))).thenReturn(
|
||||
when(client.put(anyString(), anyString(), any())).thenReturn(
|
||||
response);
|
||||
when(client.post(anyString(), anyString(), any(byte[].class))).thenReturn(
|
||||
when(client.post(anyString(), anyString(), any())).thenReturn(
|
||||
response);
|
||||
|
||||
Configuration configuration = TEST_UTIL.getConfiguration();
|
||||
|
@ -118,7 +118,7 @@ public class TestRemoteHTableRetries {
|
|||
remoteTable.put(new Put(Bytes.toBytes("Row")));
|
||||
}
|
||||
});
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -131,7 +131,7 @@ public class TestRemoteHTableRetries {
|
|||
remoteTable.put(Arrays.asList(puts));
|
||||
}
|
||||
});
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -142,7 +142,7 @@ public class TestRemoteHTableRetries {
|
|||
remoteTable.getScanner(new Scan());
|
||||
}
|
||||
});
|
||||
verify(client, times(RETRIES)).post(anyString(), anyString(), any(byte[].class));
|
||||
verify(client, times(RETRIES)).post(anyString(), anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -155,7 +155,7 @@ public class TestRemoteHTableRetries {
|
|||
remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put );
|
||||
}
|
||||
});
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
|
||||
verify(client, times(RETRIES)).put(anyString(), anyString(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -159,7 +159,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -582,11 +582,11 @@ public class TestRSGroupBasedLoadBalancer {
|
|||
Mockito.when(gm.listRSGroups()).thenReturn(
|
||||
Lists.newLinkedList(groupMap.values()));
|
||||
Mockito.when(gm.isOnline()).thenReturn(true);
|
||||
Mockito.when(gm.getRSGroupOfTable(Mockito.any(TableName.class)))
|
||||
Mockito.when(gm.getRSGroupOfTable(Mockito.any()))
|
||||
.thenAnswer(new Answer<String>() {
|
||||
@Override
|
||||
public String answer(InvocationOnMock invocation) throws Throwable {
|
||||
return tableMap.get(invocation.getArguments()[0]);
|
||||
return tableMap.get(invocation.getArgument(0));
|
||||
}
|
||||
});
|
||||
return gm;
|
||||
|
|
|
@ -605,7 +605,7 @@
|
|||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
|
|
@ -23,6 +23,9 @@ import static org.junit.Assert.assertEquals;
|
|||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.anyInt;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
@ -402,8 +405,8 @@ public class TestHBaseTestingUtility {
|
|||
|
||||
@Test public void testResolvePortConflict() throws Exception {
|
||||
// raises port conflict between 1st call and 2nd call of randomPort() by mocking Random object
|
||||
Random random = Mockito.mock(Random.class);
|
||||
Mockito.when(random.nextInt(Mockito.any(Integer.class)))
|
||||
Random random = mock(Random.class);
|
||||
when(random.nextInt(anyInt()))
|
||||
.thenAnswer(new Answer<Integer>() {
|
||||
int[] numbers = { 1, 1, 2 };
|
||||
int count = 0;
|
||||
|
@ -417,8 +420,8 @@ public class TestHBaseTestingUtility {
|
|||
});
|
||||
|
||||
HBaseTestingUtility.PortAllocator.AvailablePortChecker portChecker =
|
||||
Mockito.mock(HBaseTestingUtility.PortAllocator.AvailablePortChecker.class);
|
||||
Mockito.when(portChecker.available(Mockito.any(Integer.class))).thenReturn(true);
|
||||
mock(HBaseTestingUtility.PortAllocator.AvailablePortChecker.class);
|
||||
when(portChecker.available(anyInt())).thenReturn(true);
|
||||
|
||||
HBaseTestingUtility.PortAllocator portAllocator =
|
||||
new HBaseTestingUtility.PortAllocator(random, portChecker);
|
||||
|
@ -426,7 +429,7 @@ public class TestHBaseTestingUtility {
|
|||
int port1 = portAllocator.randomFreePort();
|
||||
int port2 = portAllocator.randomFreePort();
|
||||
assertNotEquals(port1, port2);
|
||||
Mockito.verify(random, Mockito.times(3)).nextInt(Mockito.any(Integer.class));
|
||||
Mockito.verify(random, Mockito.times(3)).nextInt(anyInt());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -165,7 +165,7 @@ public class TestMetaTableAccessorNoCluster {
|
|||
.thenThrow(new ServiceException("Server not running (3 of 3)"))
|
||||
.thenAnswer(new Answer<ScanResponse>() {
|
||||
public ScanResponse answer(InvocationOnMock invocation) throws Throwable {
|
||||
((HBaseRpcController) invocation.getArguments()[0]).setCellScanner(CellUtil
|
||||
((HBaseRpcController) invocation.getArgument(0)).setCellScanner(CellUtil
|
||||
.createCellScanner(cellScannables));
|
||||
return builder.setScannerId(1234567890L).setMoreResults(false).build();
|
||||
}
|
||||
|
@ -189,7 +189,7 @@ public class TestMetaTableAccessorNoCluster {
|
|||
|
||||
// Now shove our HRI implementation into the spied-upon connection.
|
||||
Mockito.doReturn(implementation).
|
||||
when(connection).getClient(Mockito.any(ServerName.class));
|
||||
when(connection).getClient(Mockito.any());
|
||||
|
||||
// Scan meta for user tables and verify we got back expected answer.
|
||||
NavigableMap<RegionInfo, Result> hris =
|
||||
|
|
|
@ -250,7 +250,7 @@ public class TestMetaTableLocator {
|
|||
Mockito.mock(AdminProtos.AdminService.BlockingInterface.class);
|
||||
Mockito.when(implementation.getRegionInfo((RpcController)Mockito.any(),
|
||||
(GetRegionInfoRequest)Mockito.any())).thenThrow(connectException);
|
||||
Mockito.when(connection.getAdmin(Mockito.any(ServerName.class))).
|
||||
Mockito.when(connection.getAdmin(Mockito.any())).
|
||||
thenReturn(implementation);
|
||||
RpcControllerFactory controllerFactory = Mockito.mock(RpcControllerFactory.class);
|
||||
Mockito.when(controllerFactory.newController()).thenReturn(
|
||||
|
@ -325,12 +325,12 @@ public class TestMetaTableLocator {
|
|||
thenReturn(anyLocation);
|
||||
if (admin != null) {
|
||||
// If a call to getHRegionConnection, return this implementation.
|
||||
Mockito.when(connection.getAdmin(Mockito.any(ServerName.class))).
|
||||
Mockito.when(connection.getAdmin(Mockito.any())).
|
||||
thenReturn(admin);
|
||||
}
|
||||
if (client != null) {
|
||||
// If a call to getClient, return this implementation.
|
||||
Mockito.when(connection.getClient(Mockito.any(ServerName.class))).
|
||||
Mockito.when(connection.getClient(Mockito.any())).
|
||||
thenReturn(client);
|
||||
}
|
||||
return connection;
|
||||
|
|
|
@ -362,7 +362,7 @@ public class TestZooKeeperTableArchiveClient {
|
|||
public Iterable<FileStatus> answer(InvocationOnMock invocation) throws Throwable {
|
||||
counter[0]++;
|
||||
LOG.debug(counter[0] + "/ " + expected + ") Wrapping call to getDeletableFiles for files: "
|
||||
+ invocation.getArguments()[0]);
|
||||
+ invocation.getArgument(0));
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<FileStatus> ret = (Iterable<FileStatus>) invocation.callRealMethod();
|
||||
|
|
|
@ -116,12 +116,12 @@ public class HConnectionTestingUtility {
|
|||
.thenReturn(new RegionLocations(loc));
|
||||
if (admin != null) {
|
||||
// If a call to getAdmin, return this implementation.
|
||||
Mockito.when(c.getAdmin(Mockito.any(ServerName.class))).
|
||||
Mockito.when(c.getAdmin(Mockito.any())).
|
||||
thenReturn(admin);
|
||||
}
|
||||
if (client != null) {
|
||||
// If a call to getClient, return this client.
|
||||
Mockito.when(c.getClient(Mockito.any(ServerName.class))).
|
||||
Mockito.when(c.getClient(Mockito.any())).
|
||||
thenReturn(client);
|
||||
}
|
||||
NonceGenerator ng = Mockito.mock(NonceGenerator.class);
|
||||
|
|
|
@ -96,8 +96,8 @@ public class TestForeignExceptionDispatcher {
|
|||
assertTrue("Monitor didn't get timeout", monitor.hasException());
|
||||
|
||||
// verify that that we propagated the error
|
||||
Mockito.verify(listener1).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener2).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener1).receive(Mockito.any());
|
||||
Mockito.verify(listener2).receive(Mockito.any());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -118,7 +118,7 @@ public class TestForeignExceptionDispatcher {
|
|||
timer.start();
|
||||
timer.trigger();
|
||||
// make sure that we got the timer error
|
||||
Mockito.verify(listener1, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener2, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener1, Mockito.times(1)).receive(Mockito.any());
|
||||
Mockito.verify(listener2, Mockito.times(1)).receive(Mockito.any());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ public class TestTimeoutExceptionInjector {
|
|||
TimeoutExceptionInjector timer = new TimeoutExceptionInjector(listener, time);
|
||||
timer.start();
|
||||
timer.trigger();
|
||||
Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -58,7 +58,7 @@ public class TestTimeoutExceptionInjector {
|
|||
TimeoutExceptionInjector timer = new TimeoutExceptionInjector(listener, time);
|
||||
timer.start();
|
||||
timer.trigger();
|
||||
Mockito.verify(listener).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener).receive(Mockito.any());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -98,7 +98,7 @@ public class TestTimeoutExceptionInjector {
|
|||
LOG.debug("Correctly failed timer: " + e.getMessage());
|
||||
}
|
||||
Thread.sleep(time * 2);
|
||||
Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any(ForeignException.class));
|
||||
Mockito.verify(listener, Mockito.times(1)).receive(Mockito.any());
|
||||
Mockito.verifyNoMoreInteractions(listener);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,7 +22,6 @@ import static org.junit.Assert.assertFalse;
|
|||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Matchers.eq;
|
||||
import static org.mockito.Mockito.doAnswer;
|
||||
import static org.mockito.Mockito.mock;
|
||||
|
@ -42,7 +41,6 @@ import java.util.ArrayList;
|
|||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
@ -56,8 +54,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
|
|||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl;
|
||||
import org.apache.hadoop.hbase.security.User;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
|
||||
|
@ -182,7 +178,6 @@ public class TestSimpleRpcScheduler {
|
|||
|
||||
@Test
|
||||
public void testHandlerIsolation() throws IOException, InterruptedException {
|
||||
|
||||
CallRunner generalTask = createMockTask();
|
||||
CallRunner priorityTask = createMockTask();
|
||||
CallRunner replicationTask = createMockTask();
|
||||
|
@ -219,9 +214,7 @@ public class TestSimpleRpcScheduler {
|
|||
scheduler.init(CONTEXT);
|
||||
scheduler.start();
|
||||
for (CallRunner task : tasks) {
|
||||
when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(),
|
||||
(Message) anyObject(), (User) anyObject()))
|
||||
.thenReturn(qos.get(task));
|
||||
when(qosFunction.getPriority(any(), any(), any())).thenReturn(qos.get(task));
|
||||
scheduler.dispatch(task);
|
||||
}
|
||||
for (CallRunner task : tasks) {
|
||||
|
@ -238,13 +231,11 @@ public class TestSimpleRpcScheduler {
|
|||
ServerCall call = mock(ServerCall.class);
|
||||
CallRunner task = mock(CallRunner.class);
|
||||
when(task.getRpcCall()).thenReturn(call);
|
||||
when(call.getRequestUser()).thenReturn(Optional.empty());
|
||||
return task;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRpcScheduler() throws Exception {
|
||||
|
||||
testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
|
||||
testRpcScheduler(RpcExecutor.CALL_QUEUE_TYPE_FIFO_CONF_VALUE);
|
||||
}
|
||||
|
@ -254,9 +245,7 @@ public class TestSimpleRpcScheduler {
|
|||
schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY, queueType);
|
||||
|
||||
PriorityFunction priority = mock(PriorityFunction.class);
|
||||
when(priority.getPriority(any(RequestHeader.class),
|
||||
any(Message.class), any(User.class)))
|
||||
.thenReturn(HConstants.NORMAL_QOS);
|
||||
when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
|
||||
|
||||
RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 1, 1, 1, priority,
|
||||
HConstants.QOS_THRESHOLD);
|
||||
|
@ -268,25 +257,22 @@ public class TestSimpleRpcScheduler {
|
|||
RequestHeader smallHead = RequestHeader.newBuilder().setCallId(1).build();
|
||||
when(smallCallTask.getRpcCall()).thenReturn(smallCall);
|
||||
when(smallCall.getHeader()).thenReturn(smallHead);
|
||||
when(smallCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
CallRunner largeCallTask = mock(CallRunner.class);
|
||||
ServerCall largeCall = mock(ServerCall.class);
|
||||
RequestHeader largeHead = RequestHeader.newBuilder().setCallId(50).build();
|
||||
when(largeCallTask.getRpcCall()).thenReturn(largeCall);
|
||||
when(largeCall.getHeader()).thenReturn(largeHead);
|
||||
when(largeCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
CallRunner hugeCallTask = mock(CallRunner.class);
|
||||
ServerCall hugeCall = mock(ServerCall.class);
|
||||
RequestHeader hugeHead = RequestHeader.newBuilder().setCallId(100).build();
|
||||
when(hugeCallTask.getRpcCall()).thenReturn(hugeCall);
|
||||
when(hugeCall.getHeader()).thenReturn(hugeHead);
|
||||
when(hugeCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
when(priority.getDeadline(eq(smallHead), any(Message.class))).thenReturn(0L);
|
||||
when(priority.getDeadline(eq(largeHead), any(Message.class))).thenReturn(50L);
|
||||
when(priority.getDeadline(eq(hugeHead), any(Message.class))).thenReturn(100L);
|
||||
when(priority.getDeadline(eq(smallHead), any())).thenReturn(0L);
|
||||
when(priority.getDeadline(eq(largeHead), any())).thenReturn(50L);
|
||||
when(priority.getDeadline(eq(hugeHead), any())).thenReturn(100L);
|
||||
|
||||
final ArrayList<Integer> work = new ArrayList<>();
|
||||
doAnswerTaskExecution(smallCallTask, work, 10, 250);
|
||||
|
@ -337,8 +323,7 @@ public class TestSimpleRpcScheduler {
|
|||
schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0f);
|
||||
|
||||
PriorityFunction priority = mock(PriorityFunction.class);
|
||||
when(priority.getPriority(any(RequestHeader.class), any(Message.class),
|
||||
any(User.class))).thenReturn(HConstants.NORMAL_QOS);
|
||||
when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
|
||||
|
||||
RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 2, 1, 1, priority,
|
||||
HConstants.QOS_THRESHOLD);
|
||||
|
@ -353,8 +338,7 @@ public class TestSimpleRpcScheduler {
|
|||
schedConf.setFloat(RWQueueRpcExecutor.CALL_QUEUE_SCAN_SHARE_CONF_KEY, 0.5f);
|
||||
|
||||
PriorityFunction priority = mock(PriorityFunction.class);
|
||||
when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
|
||||
any(User.class))).thenReturn(HConstants.NORMAL_QOS);
|
||||
when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
|
||||
|
||||
RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 3, 1, 1, priority,
|
||||
HConstants.QOS_THRESHOLD);
|
||||
|
@ -369,14 +353,12 @@ public class TestSimpleRpcScheduler {
|
|||
when(putCallTask.getRpcCall()).thenReturn(putCall);
|
||||
when(putCall.getHeader()).thenReturn(putHead);
|
||||
when(putCall.getParam()).thenReturn(putCall.param);
|
||||
when(putCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
CallRunner getCallTask = mock(CallRunner.class);
|
||||
ServerCall getCall = mock(ServerCall.class);
|
||||
RequestHeader getHead = RequestHeader.newBuilder().setMethodName("get").build();
|
||||
when(getCallTask.getRpcCall()).thenReturn(getCall);
|
||||
when(getCall.getHeader()).thenReturn(getHead);
|
||||
when(getCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
CallRunner scanCallTask = mock(CallRunner.class);
|
||||
ServerCall scanCall = mock(ServerCall.class);
|
||||
|
@ -385,7 +367,6 @@ public class TestSimpleRpcScheduler {
|
|||
when(scanCallTask.getRpcCall()).thenReturn(scanCall);
|
||||
when(scanCall.getHeader()).thenReturn(scanHead);
|
||||
when(scanCall.getParam()).thenReturn(scanCall.param);
|
||||
when(scanCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
ArrayList<Integer> work = new ArrayList<>();
|
||||
doAnswerTaskExecution(putCallTask, work, 1, 1000);
|
||||
|
@ -449,8 +430,7 @@ public class TestSimpleRpcScheduler {
|
|||
schedConf.setInt("hbase.ipc.server.max.callqueue.length", 5);
|
||||
|
||||
PriorityFunction priority = mock(PriorityFunction.class);
|
||||
when(priority.getPriority(any(RequestHeader.class), any(Message.class),
|
||||
any(User.class))).thenReturn(HConstants.NORMAL_QOS);
|
||||
when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
|
||||
SimpleRpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 0, 0, 0, priority,
|
||||
HConstants.QOS_THRESHOLD);
|
||||
try {
|
||||
|
@ -463,7 +443,6 @@ public class TestSimpleRpcScheduler {
|
|||
RequestHeader putHead = RequestHeader.newBuilder().setMethodName("mutate").build();
|
||||
when(putCallTask.getRpcCall()).thenReturn(putCall);
|
||||
when(putCall.getHeader()).thenReturn(putHead);
|
||||
when(putCall.getRequestUser()).thenReturn(Optional.empty());
|
||||
|
||||
assertTrue(scheduler.dispatch(putCallTask));
|
||||
|
||||
|
@ -516,8 +495,7 @@ public class TestSimpleRpcScheduler {
|
|||
schedConf.set(RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY,
|
||||
RpcExecutor.CALL_QUEUE_TYPE_CODEL_CONF_VALUE);
|
||||
PriorityFunction priority = mock(PriorityFunction.class);
|
||||
when(priority.getPriority(any(RPCProtos.RequestHeader.class), any(Message.class),
|
||||
any(User.class))).thenReturn(HConstants.NORMAL_QOS);
|
||||
when(priority.getPriority(any(), any(), any())).thenReturn(HConstants.NORMAL_QOS);
|
||||
SimpleRpcScheduler scheduler =
|
||||
new SimpleRpcScheduler(schedConf, 1, 1, 1, priority, HConstants.QOS_THRESHOLD);
|
||||
try {
|
||||
|
|
|
@ -145,7 +145,7 @@ public class TestSplitLogManager {
|
|||
|
||||
// By default, we let the test manage the error as before, so the server
|
||||
// does not appear as dead from the master point of view, only from the split log pov.
|
||||
Mockito.when(sm.isServerOnline(Mockito.any(ServerName.class))).thenReturn(true);
|
||||
Mockito.when(sm.isServerOnline(Mockito.any())).thenReturn(true);
|
||||
|
||||
to = 12000;
|
||||
conf.setInt(HConstants.HBASE_SPLITLOG_MANAGER_TIMEOUT, to);
|
||||
|
|
|
@ -17,6 +17,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.master.assignment;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
|
@ -55,23 +57,21 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
|
|||
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
|
||||
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
|
||||
import org.apache.hadoop.hbase.security.Superusers;
|
||||
import org.apache.hadoop.hbase.util.FSUtils;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException;
|
||||
import org.apache.hadoop.hbase.util.FSUtils;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
|
||||
/**
|
||||
* A mocked master services.
|
||||
|
@ -136,17 +136,15 @@ public class MockMasterServices extends MockNoopMasterServices {
|
|||
MutateResponse.Builder builder = MutateResponse.newBuilder();
|
||||
builder.setProcessed(true);
|
||||
try {
|
||||
Mockito.when(ri.mutate((RpcController)Mockito.any(), (MutateRequest)Mockito.any())).
|
||||
thenReturn(builder.build());
|
||||
Mockito.when(ri.mutate(any(), any())).thenReturn(builder.build());
|
||||
} catch (ServiceException se) {
|
||||
throw ProtobufUtil.handleRemoteException(se);
|
||||
}
|
||||
try {
|
||||
Mockito.when(ri.multi((RpcController)Mockito.any(), (MultiRequest)Mockito.any())).
|
||||
thenAnswer(new Answer<MultiResponse>() {
|
||||
Mockito.when(ri.multi(any(), any())).thenAnswer(new Answer<MultiResponse>() {
|
||||
@Override
|
||||
public MultiResponse answer(InvocationOnMock invocation) throws Throwable {
|
||||
return buildMultiResponse( (MultiRequest)invocation.getArguments()[1]);
|
||||
return buildMultiResponse(invocation.getArgument(1));
|
||||
}
|
||||
});
|
||||
} catch (ServiceException se) {
|
||||
|
|
|
@ -216,7 +216,7 @@ public class TestCleanerChore {
|
|||
FSUtils.logFileSystemState(fs, testDir, LOG);
|
||||
return (Boolean) invocation.callRealMethod();
|
||||
}
|
||||
}).when(spy).isFileDeletable(Mockito.any(FileStatus.class));
|
||||
}).when(spy).isFileDeletable(Mockito.any());
|
||||
|
||||
// run the chore
|
||||
chore.chore();
|
||||
|
@ -225,7 +225,7 @@ public class TestCleanerChore {
|
|||
assertTrue("Added file unexpectedly deleted", fs.exists(addedFile));
|
||||
assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
|
||||
assertFalse("Original file unexpectedly retained", fs.exists(file));
|
||||
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(FileStatus.class));
|
||||
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
|
||||
Mockito.reset(spy);
|
||||
}
|
||||
|
||||
|
@ -274,7 +274,7 @@ public class TestCleanerChore {
|
|||
FSUtils.logFileSystemState(fs, testDir, LOG);
|
||||
return (Boolean) invocation.callRealMethod();
|
||||
}
|
||||
}).when(spy).isFileDeletable(Mockito.any(FileStatus.class));
|
||||
}).when(spy).isFileDeletable(Mockito.any());
|
||||
|
||||
// attempt to delete the directory, which
|
||||
if (chore.checkAndDeleteDirectory(parent)) {
|
||||
|
@ -286,7 +286,7 @@ public class TestCleanerChore {
|
|||
assertTrue("Added file unexpectedly deleted", fs.exists(racyFile));
|
||||
assertTrue("Parent directory deleted unexpectedly", fs.exists(parent));
|
||||
assertFalse("Original file unexpectedly retained", fs.exists(file));
|
||||
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(FileStatus.class));
|
||||
Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
|
|||
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
|
||||
import org.apache.hadoop.hbase.master.MasterRpcServices;
|
||||
import org.apache.hadoop.hbase.master.MasterServices;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
|
||||
import org.apache.hadoop.hbase.testclassification.MasterTests;
|
||||
import org.apache.hadoop.hbase.testclassification.SmallTests;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
@ -49,10 +51,6 @@ import org.junit.experimental.categories.Category;
|
|||
import org.junit.rules.TestName;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;
|
||||
|
||||
/**
|
||||
* Tests logic of {@link SimpleRegionNormalizer}.
|
||||
|
@ -353,9 +351,9 @@ public class TestSimpleRegionNormalizer {
|
|||
// for simplicity all regions are assumed to be on one server; doesn't matter to us
|
||||
ServerName sn = ServerName.valueOf("localhost", 0, 1L);
|
||||
when(masterServices.getAssignmentManager().getRegionStates().
|
||||
getRegionsOfTable(any(TableName.class))).thenReturn(RegionInfo);
|
||||
getRegionsOfTable(any())).thenReturn(RegionInfo);
|
||||
when(masterServices.getAssignmentManager().getRegionStates().
|
||||
getRegionServerOfRegion(any(RegionInfo.class))).thenReturn(sn);
|
||||
getRegionServerOfRegion(any())).thenReturn(sn);
|
||||
|
||||
for (Map.Entry<byte[], Integer> region : regionSizes.entrySet()) {
|
||||
RegionLoad regionLoad = Mockito.mock(RegionLoad.class);
|
||||
|
@ -366,8 +364,8 @@ public class TestSimpleRegionNormalizer {
|
|||
getRegionsLoad().get(region.getKey())).thenReturn(regionLoad);
|
||||
}
|
||||
try {
|
||||
when(masterRpcServices.isSplitOrMergeEnabled(any(RpcController.class),
|
||||
any(IsSplitOrMergeEnabledRequest.class))).thenReturn(
|
||||
when(masterRpcServices.isSplitOrMergeEnabled(any(),
|
||||
any())).thenReturn(
|
||||
IsSplitOrMergeEnabledResponse.newBuilder().setEnabled(true).build());
|
||||
} catch (ServiceException se) {
|
||||
LOG.debug("error setting isSplitOrMergeEnabled switch", se);
|
||||
|
|
|
@ -124,7 +124,7 @@ public class TestProcedure {
|
|||
proc.completedProcedure.await();
|
||||
verify(procspy).sendGlobalBarrierReached();
|
||||
verify(procspy).sendGlobalBarrierComplete();
|
||||
verify(procspy, never()).receive(any(ForeignException.class));
|
||||
verify(procspy, never()).receive(any());
|
||||
}
|
||||
|
||||
@Test(timeout = 60000)
|
||||
|
@ -176,7 +176,7 @@ public class TestProcedure {
|
|||
procspy.completedProcedure.await();
|
||||
verify(procspy).sendGlobalBarrierReached();
|
||||
verify(procspy).sendGlobalBarrierComplete();
|
||||
verify(procspy, never()).receive(any(ForeignException.class));
|
||||
verify(procspy, never()).receive(any());
|
||||
}
|
||||
|
||||
@Test(timeout = 60000)
|
||||
|
|
|
@ -41,10 +41,10 @@ import java.util.List;
|
|||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
|
||||
import org.apache.hadoop.hbase.testclassification.MasterTests;
|
||||
import org.apache.hadoop.hbase.testclassification.SmallTests;
|
||||
import org.apache.hadoop.hbase.errorhandling.ForeignException;
|
||||
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@ -52,8 +52,6 @@ import org.mockito.InOrder;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;

/**
* Test Procedure coordinator operation.
* <p>

@ -106,7 +104,7 @@ public class TestProcedureCoordinator {
Procedure proc2 = new Procedure(coordinator, monitor,
WAKE_FREQUENCY, TIMEOUT, procName +"2", procData, expected);
Procedure procSpy2 = spy(proc2);
when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
.thenReturn(procSpy, procSpy2);

coordinator.startProcedure(procSpy.getErrorMonitor(), procName, procData, expected);

@ -127,7 +125,7 @@ public class TestProcedureCoordinator {
TIMEOUT, procName, procData, expected);
final Procedure procSpy = spy(proc);

when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
.thenReturn(procSpy);

// use the passed controller responses

@ -139,10 +137,10 @@ public class TestProcedureCoordinator {
proc = coordinator.startProcedure(proc.getErrorMonitor(), procName, procData, expected);
// and wait for it to finish
while(!proc.completedLatch.await(WAKE_FREQUENCY, TimeUnit.MILLISECONDS));
verify(procSpy, atLeastOnce()).receive(any(ForeignException.class));
verify(procSpy, atLeastOnce()).receive(any());
verify(coordinator, times(1)).rpcConnectionFailure(anyString(), eq(cause));
verify(controller, times(1)).sendGlobalBarrierAcquire(procSpy, procData, expected);
verify(controller, never()).sendGlobalBarrierReached(any(Procedure.class),
verify(controller, never()).sendGlobalBarrierReached(any(),
anyListOf(String.class));
}

@ -158,7 +156,7 @@ public class TestProcedureCoordinator {
final Procedure spy = spy(new Procedure(coordinator,
WAKE_FREQUENCY, TIMEOUT, procName, procData, expected));

when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
.thenReturn(spy);

// use the passed controller responses

@ -171,11 +169,11 @@ public class TestProcedureCoordinator {
Procedure task = coordinator.startProcedure(spy.getErrorMonitor(), procName, procData, expected);
// and wait for it to finish
while(!task.completedLatch.await(WAKE_FREQUENCY, TimeUnit.MILLISECONDS));
verify(spy, atLeastOnce()).receive(any(ForeignException.class));
verify(spy, atLeastOnce()).receive(any());
verify(coordinator, times(1)).rpcConnectionFailure(anyString(), eq(cause));
verify(controller, times(1)).sendGlobalBarrierAcquire(eq(spy),
eq(procData), anyListOf(String.class));
verify(controller, times(1)).sendGlobalBarrierReached(any(Procedure.class),
verify(controller, times(1)).sendGlobalBarrierReached(any(),
anyListOf(String.class));
}

@ -267,7 +265,7 @@ public class TestProcedureCoordinator {
public void runCoordinatedOperation(Procedure spy, AcquireBarrierAnswer prepareOperation,
BarrierAnswer commitOperation, String... cohort) throws Exception {
List<String> expected = Arrays.asList(cohort);
when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(procName), eq(procData), anyListOf(String.class)))
when(coordinator.createProcedure(any(), eq(procName), eq(procData), anyListOf(String.class)))
.thenReturn(spy);

// use the passed controller responses

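Editorial note on the TestProcedureCoordinator hunks above: the rewrites from any(ForeignExceptionDispatcher.class) to the untyped any() follow from Mockito 2's stricter matchers, where typed matchers such as any(SomeType.class) perform a runtime type check and reject null arguments, while the plain any() keeps the match-anything behaviour. A minimal sketch, assuming a hypothetical Service interface rather than any HBase type:

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    interface Service { void handle(String message); }

    class AnyMatcherSketch {
      public static void main(String[] args) {
        Service service = mock(Service.class);
        service.handle(null);
        // The untyped any() still matches the null argument...
        verify(service).handle(any());
        // ...whereas any(String.class) would not match here under mockito-core 2.x,
        // because typed matchers now exclude nulls.
      }
    }
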
@ -117,7 +117,7 @@ public class TestProcedureMember {
member.receivedReachedGlobalBarrier(op);
return null;
}
}).when(mockMemberComms).sendMemberAcquired(any(Subprocedure.class));
}).when(mockMemberComms).sendMemberAcquired(any());
}

/**

@ -147,7 +147,7 @@ public class TestProcedureMember {
order.verify(spy).insideBarrier();
order.verify(mockMemberComms).sendMemberCompleted(eq(spy), eq(data));
order.verify(mockMemberComms, never()).sendMemberAborted(eq(spy),
any(ForeignException.class));
any());
}

/**

@ -182,8 +182,8 @@ public class TestProcedureMember {
order.verify(spySub, never()).insideBarrier();
order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
// error recovery path exercised
order.verify(spySub).cancel(anyString(), any(Exception.class));
order.verify(spySub).cleanup(any(Exception.class));
order.verify(spySub).cancel(anyString(), any());
order.verify(spySub).cleanup(any());
}

/**

@ -200,7 +200,7 @@ public class TestProcedureMember {
public Void answer(InvocationOnMock invocation) throws Throwable {
throw new IOException("Forced IOException in memeber prepare");
}
}).when(mockMemberComms).sendMemberAcquired(any(Subprocedure.class));
}).when(mockMemberComms).sendMemberAcquired(any());

// run the operation
// build a new operation

@ -218,8 +218,8 @@ public class TestProcedureMember {
order.verify(spySub, never()).insideBarrier();
order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
// error recovery path exercised
order.verify(spySub).cancel(anyString(), any(Exception.class));
order.verify(spySub).cleanup(any(Exception.class));
order.verify(spySub).cancel(anyString(), any());
order.verify(spySub).cleanup(any());
}

/**

@ -261,8 +261,8 @@ public class TestProcedureMember {
order.verify(spySub, never()).insideBarrier();
order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
// error recovery path exercised
order.verify(spySub).cancel(anyString(), any(Exception.class));
order.verify(spySub).cleanup(any(Exception.class));
order.verify(spySub).cancel(anyString(), any());
order.verify(spySub).cleanup(any());
}

/**

@ -302,8 +302,8 @@ public class TestProcedureMember {
// Later phases not run
order.verify(mockMemberComms, never()).sendMemberCompleted(eq(spySub), eq(data));
// error recovery path exercised
order.verify(spySub).cancel(anyString(), any(Exception.class));
order.verify(spySub).cleanup(any(Exception.class));
order.verify(spySub).cancel(anyString(), any());
order.verify(spySub).cleanup(any());
}

/**

@ -328,7 +328,7 @@ public class TestProcedureMember {
Thread.sleep(WAKE_FREQUENCY);
return null;
}
}).when(mockMemberComms).sendMemberCompleted(any(Subprocedure.class), eq(data));
}).when(mockMemberComms).sendMemberCompleted(any(), eq(data));

// run the operation
// build a new operation

@ -344,8 +344,8 @@ public class TestProcedureMember {
order.verify(spySub).insideBarrier();
order.verify(mockMemberComms).sendMemberCompleted(eq(spySub), eq(data));
// error recovery path exercised
order.verify(spySub).cancel(anyString(), any(Exception.class));
order.verify(spySub).cleanup(any(Exception.class));
order.verify(spySub).cancel(anyString(), any());
order.verify(spySub).cleanup(any());
}

/**

@ -369,7 +369,7 @@ public class TestProcedureMember {
doThrow(new ForeignException("SRC", "prepare exception")).when(spy).acquireBarrier();
// and throw a connection error when we try to tell the controller about it
doThrow(new IOException("Controller is down!")).when(mockMemberComms)
.sendMemberAborted(eq(spy), any(ForeignException.class));
.sendMemberAborted(eq(spy), any());

// run the operation

@ -388,9 +388,9 @@ public class TestProcedureMember {
// TODO Need to do another refactor to get this to propagate to the coordinator.
// make sure we pass a remote exception back the controller
// order.verify(mockMemberComms).sendMemberAborted(eq(spy),
// any(ExternalException.class));
// any());
// order.verify(dispSpy).receiveError(anyString(),
// any(ExternalException.class), any());
// any(), any());
}

/**

@ -427,7 +427,7 @@ public class TestProcedureMember {
verifyZeroInteractions(pool);
// get two abort requests
// TODO Need to do another refactor to get this to propagate to the coordinator.
// verify(mockMemberComms, times(2)).sendMemberAborted(any(Subprocedure.class), any(ExternalException.class));
// verify(mockMemberComms, times(2)).sendMemberAborted(any(), any());
}

/**

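The sendMemberAcquired and sendMemberCompleted stubs above use the doAnswer(...).when(mock).method(any()) form, which is how void methods are stubbed in both Mockito 1 and 2; only the matcher changes in this migration. A minimal sketch with a hypothetical Comms interface, not the HBase member-comms type:

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    interface Comms { void sendAcquired(Object subprocedure); }

    class DoAnswerSketch {
      public static void main(String[] args) {
        Comms comms = mock(Comms.class);
        // Stub the void method: run a callback instead of the real call.
        doAnswer(invocation -> {
          System.out.println("acquired: " + invocation.getArgument(0));
          return null; // void methods return null from the Answer
        }).when(comms).sendAcquired(any());

        comms.sendAcquired("subproc-1");
      }
    }
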
@ -284,7 +284,7 @@ public class TestZKProcedure {
Procedure coordinatorTask = Mockito.spy(new Procedure(coordinator,
coordinatorTaskErrorMonitor, WAKE_FREQUENCY, TIMEOUT,
opName, data, expected));
when(coordinator.createProcedure(any(ForeignExceptionDispatcher.class), eq(opName), eq(data), anyListOf(String.class)))
when(coordinator.createProcedure(any(), eq(opName), eq(data), anyListOf(String.class)))
.thenReturn(coordinatorTask);
// count down the error latch when we get the remote error
Mockito.doAnswer(new Answer<Void>() {

@ -296,7 +296,7 @@ public class TestZKProcedure {
coordinatorReceivedErrorLatch.countDown();
return null;
}
}).when(coordinatorTask).receive(Mockito.any(ForeignException.class));
}).when(coordinatorTask).receive(Mockito.any());

// ----------------------------
// start running the operation

@ -31,7 +31,6 @@ import java.util.concurrent.CountDownLatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.testclassification.MasterTests;

@ -131,10 +130,10 @@ public class TestZKProcedureControllers {
LOG.debug("Commit node:" + commit + ", exists:" + ZKUtil.checkExists(watcher, commit));
committed.await();

verify(monitor, never()).receive(Mockito.any(ForeignException.class));
verify(monitor, never()).receive(Mockito.any());
// XXX: broken due to composition.
// verify(member, never()).getManager().controllerConnectionFailure(Mockito.anyString(),
// Mockito.any(IOException.class));
// Mockito.any());
// cleanup after the test
ZKUtil.deleteNodeRecursively(watcher, controller.getZkController().getBaseZnode());
assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare));

@ -363,7 +362,7 @@ public class TestZKProcedureControllers {
// verify(member, Mockito.times(cohortSize)).submitSubprocedure(Mockito.eq(operationName),
// (byte[]) Mockito.argThat(new ArrayEquals(data)));
Mockito.verify(member,
Mockito.atLeast(cohortSize)).submitSubprocedure(Mockito.any(Subprocedure.class));
Mockito.atLeast(cohortSize)).submitSubprocedure(Mockito.any());

}

@ -61,7 +61,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(regionSizes)))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region region = mockRegionWithSize(regionSizes);
Mockito.doReturn(Arrays.asList(region)).when(rs).getRegions();

@ -78,7 +78,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(regionSizes)))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region region = mockRegionWithSize(regionSizes);
Mockito.doReturn(Arrays.asList(region)).when(rs).getRegions();

@ -102,7 +102,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(r1Sum, r2Sum, r3Sum))))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region r1 = mockRegionWithSize(r1Sizes);
final Region r2 = mockRegionWithSize(r2Sizes);

@ -167,7 +167,7 @@ public class TestFileSystemUtilizationChore {
};
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(leftover1Sum, leftover2Sum))))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

// We shouldn't compute all of these region sizes, just the leftovers
final Region r1 = mockRegionWithSize(Arrays.asList(1024L, 2048L));

@ -199,7 +199,7 @@ public class TestFileSystemUtilizationChore {
};
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(leftover1Sum))))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

// We shouldn't compute all of these region sizes, just the leftovers
final Region r1 = mockRegionWithSize(Arrays.asList(1024L, 2048L));

@ -225,7 +225,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(sum(Arrays.asList(r1Sum))))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region r1 = mockRegionWithSize(r1Sizes);
final Region r2 = mockSplitParentRegionWithSize(r2Sizes);

@ -247,7 +247,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(r1Sum))
.when(rs)
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region r1 = mockRegionWithSize(r1Sizes);
final Region r2 = mockRegionReplicaWithSize(r2Sizes);

@ -274,7 +274,7 @@ public class TestFileSystemUtilizationChore {
final FileSystemUtilizationChore chore = new FileSystemUtilizationChore(rs);
doAnswer(new ExpectedRegionSizeSummationAnswer(
sum(Arrays.asList(r1HFileSizeSum, r2HFileSizeSum))))
.when(rs).reportRegionSizesForQuotas((Map<RegionInfo,Long>) any(Map.class));
.when(rs).reportRegionSizesForQuotas((Map<RegionInfo,Long>) any());

final Region r1 = mockRegionWithHFileLinks(r1StoreFileSizes, r1HFileSizes);
final Region r2 = mockRegionWithHFileLinks(r2StoreFileSizes, r2HFileSizes);

@ -51,7 +51,7 @@ public class TestMasterSpaceQuotaObserverWithMocks {
conf = HBaseConfiguration.create();
master = mock(HMaster.class);
doCallRealMethod().when(master).updateConfigurationForSpaceQuotaObserver(
any(Configuration.class));
any());
}

@Test

@ -73,7 +73,7 @@ public class TestNamespaceQuotaViolationStore {
@Test
public void testGetSpaceQuota() throws Exception {
NamespaceQuotaSnapshotStore mockStore = mock(NamespaceQuotaSnapshotStore.class);
when(mockStore.getSpaceQuota(any(String.class))).thenCallRealMethod();
when(mockStore.getSpaceQuota(any())).thenCallRealMethod();

Quotas quotaWithSpace = Quotas.newBuilder().setSpace(
SpaceQuota.newBuilder()

@ -84,7 +84,7 @@ public class TestNamespaceQuotaViolationStore {
Quotas quotaWithoutSpace = Quotas.newBuilder().build();

AtomicReference<Quotas> quotaRef = new AtomicReference<>();
when(mockStore.getQuotaForNamespace(any(String.class))).then(new Answer<Quotas>() {
when(mockStore.getQuotaForNamespace(any())).then(new Answer<Quotas>() {
@Override
public Quotas answer(InvocationOnMock invocation) throws Throwable {
return quotaRef.get();

@ -77,8 +77,8 @@ public class TestSpaceQuotaViolationPolicyRefresherChore {
when(chore.getConnection()).thenReturn(conn);
when(chore.getManager()).thenReturn(manager);
doCallRealMethod().when(chore).chore();
when(chore.isInViolation(any(SpaceQuotaSnapshot.class))).thenCallRealMethod();
doCallRealMethod().when(chore).extractQuotaSnapshot(any(Result.class), any(Map.class));
when(chore.isInViolation(any())).thenCallRealMethod();
doCallRealMethod().when(chore).extractQuotaSnapshot(any(), any());
}

@Test

@ -165,7 +165,7 @@ public class TestTableQuotaViolationStore {
@Test
public void testGetSpaceQuota() throws Exception {
TableQuotaSnapshotStore mockStore = mock(TableQuotaSnapshotStore.class);
when(mockStore.getSpaceQuota(any(TableName.class))).thenCallRealMethod();
when(mockStore.getSpaceQuota(any())).thenCallRealMethod();

Quotas quotaWithSpace = Quotas.newBuilder().setSpace(
SpaceQuota.newBuilder()

@ -176,7 +176,7 @@ public class TestTableQuotaViolationStore {
Quotas quotaWithoutSpace = Quotas.newBuilder().build();

AtomicReference<Quotas> quotaRef = new AtomicReference<>();
when(mockStore.getQuotaForTable(any(TableName.class))).then(new Answer<Quotas>() {
when(mockStore.getQuotaForTable(any())).then(new Answer<Quotas>() {
@Override
public Quotas answer(InvocationOnMock invocation) throws Throwable {
return quotaRef.get();

@ -77,22 +77,13 @@ public class TestTableSpaceQuotaViolationNotifier {

notifier.transitionTable(tn, snapshot);

verify(quotaTable).put(argThat(new SingleCellPutMatcher(expectedPut)));
}

/**
* Parameterized for Puts.
*/
private static class SingleCellPutMatcher extends SingleCellMutationMatcher<Put> {
private SingleCellPutMatcher(Put expected) {
super(expected);
}
verify(quotaTable).put(argThat(new SingleCellMutationMatcher<Put>(expectedPut)));
}

/**
* Quick hack to verify a Mutation with one column.
*/
private static class SingleCellMutationMatcher<T> extends ArgumentMatcher<T> {
final private static class SingleCellMutationMatcher<T> implements ArgumentMatcher<T> {
private final Mutation expected;

private SingleCellMutationMatcher(Mutation expected) {

@ -100,7 +91,7 @@ public class TestTableSpaceQuotaViolationNotifier {
}

@Override
public boolean matches(Object argument) {
public boolean matches(T argument) {
if (!expected.getClass().isAssignableFrom(argument.getClass())) {
return false;
}

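In Mockito 2, ArgumentMatcher became a plain interface with a typed matches(T) method and no Hamcrest dependency, which is why the custom matchers above switch from extends to implements and drop the matches(Object) signature. A minimal sketch with an illustrative matcher and Sink interface, not the HBase classes:

    import static org.mockito.ArgumentMatchers.argThat;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    import org.mockito.ArgumentMatcher;

    interface Sink { void accept(String value); }

    class MatcherSketch {
      // Typed matcher: matches(T) instead of the old matches(Object).
      static class StartsWith implements ArgumentMatcher<String> {
        private final String prefix;
        StartsWith(String prefix) { this.prefix = prefix; }
        @Override
        public boolean matches(String argument) {
          return argument != null && argument.startsWith(prefix);
        }
      }

      public static void main(String[] args) {
        Sink sink = mock(Sink.class);
        sink.accept("hbase-18925");
        verify(sink).accept(argThat(new StartsWith("hbase")));
      }
    }
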
@ -18,8 +18,8 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@ -27,8 +27,6 @@ import static org.mockito.Mockito.when;
import java.util.Optional;

import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.security.User;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

@ -42,37 +40,23 @@ public class StatefulStoreMockMaker {
return Optional.empty();
}

public void cancelCompaction(Object originalContext) {
}
public void cancelCompaction(Object originalContext) {}

public int getPriority() {
return 0;
}

private class SelectAnswer implements Answer<Optional<CompactionContext>> {
public Optional<CompactionContext> answer(InvocationOnMock invocation) throws Throwable {
return selectCompaction();
}
}

private class PriorityAnswer implements Answer<Integer> {
public Integer answer(InvocationOnMock invocation) throws Throwable {
return getPriority();
}
}
private class CancelAnswer implements Answer<Object> {
public CompactionContext answer(InvocationOnMock invocation) throws Throwable {
cancelCompaction(invocation.getArguments()[0]); return null;
cancelCompaction(invocation.getArgument(0));
return null;
}
}

public HStore createStoreMock(String name) throws Exception {
HStore store = mock(HStore.class, name);
when(store.requestCompaction(anyInt(), any(CompactionLifeCycleTracker.class), any(User.class)))
.then(new SelectAnswer());
when(store.getCompactPriority()).then(new PriorityAnswer());
doAnswer(new CancelAnswer()).when(store)
.cancelRequestedCompaction(any(CompactionContext.class));
when(store.requestCompaction(anyInt(), any(), any())).then(inv -> selectCompaction());
when(store.getCompactPriority()).then(inv -> getPriority());
doAnswer(new CancelAnswer()).when(store).cancelRequestedCompaction(any());
return store;
}
}

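Two mockito-core 2 idioms show up in the StatefulStoreMockMaker hunks above: InvocationOnMock.getArgument(int) replaces getArgumentAt(int, Class), and single-method Answer classes can collapse into lambdas. A minimal sketch with a hypothetical Calculator interface rather than the HStore API:

    import static org.mockito.ArgumentMatchers.anyInt;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    interface Calculator { int square(int value); }

    class AnswerLambdaSketch {
      public static void main(String[] args) {
        Calculator calc = mock(Calculator.class);
        // Answer written as a lambda; getArgument(0) replaces getArgumentAt(0, Integer.class).
        when(calc.square(anyInt())).thenAnswer(inv -> {
          int value = inv.getArgument(0);
          return value * value;
        });
        System.out.println(calc.square(7)); // prints 49
      }
    }
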
@ -22,6 +22,13 @@ import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.hamcrest.MockitoHamcrest.argThat;

import java.io.File;
import java.io.FileNotFoundException;

@ -31,7 +38,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;

@ -55,17 +61,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;

import static org.mockito.Matchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;

@ -73,6 +68,11 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
* This class attempts to unit test bulk HLog loading.

@ -108,12 +108,12 @@ public class TestBulkLoad {
storeFileName = (new Path(storeFileName)).getName();
List<String> storeFileNames = new ArrayList<>();
storeFileNames.add(storeFileName);
when(log.append(any(HRegionInfo.class), any(WALKey.class),
when(log.append(any(), any(),
argThat(bulkLogWalEdit(WALEdit.BULK_LOAD, tableName.toBytes(),
familyName, storeFileNames)),
any(boolean.class))).thenAnswer(new Answer() {
anyBoolean())).thenAnswer(new Answer() {
public Object answer(InvocationOnMock invocation) {
WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
WALKey walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
if (mvcc != null) {
MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();

@ -134,11 +134,11 @@ public class TestBulkLoad {

@Test
public void shouldBulkLoadSingleFamilyHLog() throws IOException {
when(log.append(any(HRegionInfo.class),
any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
any(boolean.class))).thenAnswer(new Answer() {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
public Object answer(InvocationOnMock invocation) {
WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
WALKey walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
if (mvcc != null) {
MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();

@ -153,11 +153,11 @@ public class TestBulkLoad {

@Test
public void shouldBulkLoadManyFamilyHLog() throws IOException {
when(log.append(any(HRegionInfo.class),
any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
any(boolean.class))).thenAnswer(new Answer() {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
public Object answer(InvocationOnMock invocation) {
WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
WALKey walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
if (mvcc != null) {
MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();

@ -173,11 +173,11 @@ public class TestBulkLoad {

@Test
public void shouldBulkLoadManyFamilyHLogEvenWhenTableNameNamespaceSpecified() throws IOException {
when(log.append(any(HRegionInfo.class),
any(WALKey.class), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
any(boolean.class))).thenAnswer(new Answer() {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
public Object answer(InvocationOnMock invocation) {
WALKey walKey = invocation.getArgumentAt(1, WALKey.class);
WALKey walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
if (mvcc != null) {
MultiVersionConcurrencyControl.WriteEntry we = mvcc.begin();

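The TestBulkLoad hunks above also swap any(boolean.class) for anyBoolean(), because Mockito 2's typed any(Class) no longer matches boxed primitive arguments reliably, and they import argThat from org.mockito.hamcrest.MockitoHamcrest since the core argThat now takes a Mockito ArgumentMatcher rather than a Hamcrest Matcher. A minimal sketch of the primitive-matcher point, using an illustrative Wal interface whose signature is not the real HBase WAL.append:

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.ArgumentMatchers.anyBoolean;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    // Illustrative interface; the real WAL.append signature differs.
    interface Wal { long append(Object info, Object key, Object edit, boolean inMemstore); }

    class PrimitiveMatcherSketch {
      public static void main(String[] args) {
        Wal wal = mock(Wal.class);
        // anyBoolean() matches the primitive parameter; any(boolean.class) is no longer a safe choice.
        when(wal.append(any(), any(), any(), anyBoolean())).thenReturn(42L);
        System.out.println(wal.append("region", "key", "edit", true)); // 42
      }
    }
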
@ -387,7 +387,7 @@ public class TestCompaction {
thread.interruptIfNecessary();
}

private class StoreMockMaker extends StatefulStoreMockMaker {
class StoreMockMaker extends StatefulStoreMockMaker {
public ArrayList<HStoreFile> compacting = new ArrayList<>();
public ArrayList<HStoreFile> notCompacting = new ArrayList<>();
private ArrayList<Integer> results;

@ -556,12 +556,10 @@ public class TestCompaction {
// Set up the region mock that redirects compactions.
HRegion r = mock(HRegion.class);
when(
r.compact(any(CompactionContext.class), any(HStore.class),
any(ThroughputController.class), any(User.class))).then(new Answer<Boolean>() {
r.compact(any(), any(), any(), any())).then(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
invocation.getArgumentAt(0, CompactionContext.class).compact(
invocation.getArgumentAt(2, ThroughputController.class), null);
invocation.<CompactionContext>getArgument(0).compact(invocation.getArgument(2), null);
return true;
}
});

@ -569,7 +567,8 @@ public class TestCompaction {
// Set up store mocks for 2 "real" stores and the one we use for blocking CST.
ArrayList<Integer> results = new ArrayList<>();
StoreMockMaker sm = new StoreMockMaker(results), sm2 = new StoreMockMaker(results);
HStore store = sm.createStoreMock("store1"), store2 = sm2.createStoreMock("store2");
HStore store = sm.createStoreMock("store1");
HStore store2 = sm2.createStoreMock("store2");
BlockingStoreMockMaker blocker = new BlockingStoreMockMaker();

// First, block the compaction thread so that we could muck with queue.

@ -186,7 +186,7 @@ public class TestCompactionArchiveIOException {
// none of the other files are cleared from the compactedfiles list.
// Simulate this condition with a dummy file
doThrow(new IOException("Error for test"))
.when(errFS).rename(eq(new Path(storeDir, ERROR_FILE)), any(Path.class));
.when(errFS).rename(eq(new Path(storeDir, ERROR_FILE)), any());

HRegionFileSystem fs = new HRegionFileSystem(conf, errFS, tableDir, info);
final Configuration walConf = new Configuration(conf);

@ -1104,19 +1104,19 @@ public class TestHRegion {
}
}

class IsFlushWALMarker extends ArgumentMatcher<WALEdit> {
class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
volatile FlushAction[] actions;
public IsFlushWALMarker(FlushAction... actions) {
this.actions = actions;
}
@Override
public boolean matches(Object edit) {
List<Cell> cells = ((WALEdit)edit).getCells();
public boolean matches(WALEdit edit) {
List<Cell> cells = edit.getCells();
if (cells.isEmpty()) {
return false;
}
if (WALEdit.isMetaEditFamily(cells.get(0))) {
FlushDescriptor desc = null;
FlushDescriptor desc;
try {
desc = WALEdit.getFlushDescriptor(cells.get(0));
} catch (IOException e) {

@ -2441,8 +2441,7 @@ public class TestHRegion {
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
MiniBatchOperationInProgress<Mutation> mb = invocation.getArgumentAt(0,
MiniBatchOperationInProgress.class);
MiniBatchOperationInProgress<Mutation> mb = invocation.getArgument(0);
mb.addOperationsFromCP(0, new Mutation[]{addPut});
return null;
}

@ -5960,7 +5959,7 @@ public class TestHRegion {
thenAnswer(new Answer<Long>() {
@Override
public Long answer(InvocationOnMock invocation) throws Throwable {
WALKey key = invocation.getArgumentAt(1, WALKey.class);
WALKey key = invocation.getArgument(1);
MultiVersionConcurrencyControl.WriteEntry we = key.getMvcc().begin();
key.setWriteEntry(we);
return 1L;

@ -1026,7 +1026,7 @@ public class TestHStore {
// call first time after files changed
spiedStore.refreshStoreFiles();
assertEquals(2, this.store.getStorefilesCount());
verify(spiedStore, times(1)).replaceStoreFiles(any(Collection.class), any(Collection.class));
verify(spiedStore, times(1)).replaceStoreFiles(any(), any());

// call second time
spiedStore.refreshStoreFiles();

@ -69,8 +69,8 @@ public class TestRegionServerRegionSpaceUseReport {

// Call the real method to convert the map into a protobuf
HRegionServer rs = mock(HRegionServer.class);
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());

RegionSpaceUseReportRequest requests = rs.buildRegionSpaceUseReportRequest(sizes);
assertEquals(sizes.size(), requests.getSpaceUseCount());

@ -87,8 +87,8 @@ public class TestRegionServerRegionSpaceUseReport {
public void testNullMap() {
// Call the real method to convert the map into a protobuf
HRegionServer rs = mock(HRegionServer.class);
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());

rs.buildRegionSpaceUseReportRequest(null);
}

@ -105,8 +105,8 @@ public class TestRegionServerRegionSpaceUseReport {

// Call the real method to convert the map into a protobuf
HRegionServer rs = mock(HRegionServer.class);
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any(Map.class));
doCallRealMethod().when(rs).convertRegionSize(any(RegionInfo.class), anyLong());
doCallRealMethod().when(rs).buildRegionSpaceUseReportRequest(any());
doCallRealMethod().when(rs).convertRegionSize(any(), anyLong());

rs.buildRegionSpaceUseReportRequest(sizes);
}

@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;

@ -76,9 +74,9 @@ public class TestStripeStoreEngine {
StripeCompactor mockCompactor = mock(StripeCompactor.class);
se.setCompactorOverride(mockCompactor);
when(
mockCompactor.compact(any(CompactionRequestImpl.class), anyInt(), anyLong(), any(byte[].class),
any(byte[].class), any(byte[].class), any(byte[].class),
any(ThroughputController.class), any(User.class)))
mockCompactor.compact(any(), anyInt(), anyLong(), any(),
any(), any(), any(),
any(), any()))
.thenReturn(new ArrayList<>());

// Produce 3 L0 files.

@ -105,7 +103,7 @@ public class TestStripeStoreEngine {

private static HStoreFile createFile() throws Exception {
HStoreFile sf = mock(HStoreFile.class);
when(sf.getMetadataValue(any(byte[].class)))
when(sf.getMetadataValue(any()))
.thenReturn(StripeStoreFileManager.INVALID_KEY);
when(sf.getReader()).thenReturn(mock(StoreFileReader.class));
when(sf.getPath()).thenReturn(new Path("moo"));

@ -93,22 +93,22 @@ public class TestCompactor {
StoreFileWriter writer = mock(StoreFileWriter.class);
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
return realWriter.kvs.add((KeyValue) invocation.getArguments()[0]);
return realWriter.kvs.add((KeyValue) invocation.getArgument(0));
}
}).when(writer).append(any(KeyValue.class));
}).when(writer).append(any());
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
return realWriter.data.put((byte[]) args[0], (byte[]) args[1]);
}
}).when(writer).appendFileInfo(any(byte[].class), any(byte[].class));
}).when(writer).appendFileInfo(any(), any());
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
realWriter.hasMetadata = true;
return null;
}
}).when(writer).appendMetadata(any(long.class), any(boolean.class));
}).when(writer).appendMetadata(anyLong(), anyBoolean());
doAnswer(new Answer<Path>() {
@Override
public Path answer(InvocationOnMock invocation) throws Throwable {

@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.InternalScanner;

@ -102,7 +101,7 @@ public class TestDateTieredCompactor {
when(store.areWritesEnabled()).thenReturn(true);
when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
when(store.createWriterInTmp(anyLong(), any(), anyBoolean(),
anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
OptionalLong maxSequenceId = StoreUtils.getMaxSequenceIdInList(storefiles);

@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HStore;

@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.regionserver.StripeStoreFlusher;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider;
import org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture;
import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;

@ -237,7 +235,7 @@ public class TestStripeCompactionPolicy {
scr.execute(sc, NoLimitThroughputController.INSTANCE, null);
verify(sc, only()).compact(eq(scr.getRequest()), anyInt(), anyLong(), aryEq(OPEN_KEY),
aryEq(OPEN_KEY), aryEq(OPEN_KEY), aryEq(OPEN_KEY),
any(NoLimitThroughputController.class), any(User.class));
any(), any());
}

@Test

@ -551,9 +549,8 @@ public class TestStripeCompactionPolicy {
scr.execute(sc, NoLimitThroughputController.INSTANCE, null);
verify(sc, times(1)).compact(eq(scr.getRequest()), argThat(new ArgumentMatcher<List<byte[]>>() {
@Override
public boolean matches(Object argument) {
@SuppressWarnings("unchecked")
List<byte[]> other = (List<byte[]>) argument;
public boolean matches(List<byte[]> argument) {
List<byte[]> other = argument;
if (other.size() != boundaries.size()) return false;
for (int i = 0; i < other.size(); ++i) {
if (!Bytes.equals(other.get(i), boundaries.get(i))) return false;

@ -562,7 +559,7 @@ public class TestStripeCompactionPolicy {
}
}), dropDeletesFrom == null ? isNull(byte[].class) : aryEq(dropDeletesFrom),
dropDeletesTo == null ? isNull(byte[].class) : aryEq(dropDeletesTo),
any(NoLimitThroughputController.class), any(User.class));
any(), any());
}

/**

@ -574,7 +571,7 @@ public class TestStripeCompactionPolicy {
* @param count Expected # of resulting stripes, null if not checked.
* @param size Expected target stripe size, null if not checked.
* @param start Left boundary of the compaction.
* @param righr Right boundary of the compaction.
* @param end Right boundary of the compaction.
*/
private void verifyCompaction(StripeCompactionPolicy policy, StripeInformationProvider si,
Collection<HStoreFile> sfs, Boolean dropDeletes, Integer count, Long size,

@ -588,7 +585,7 @@ public class TestStripeCompactionPolicy {
count == null ? anyInt() : eq(count.intValue()),
size == null ? anyLong() : eq(size.longValue()), aryEq(start), aryEq(end),
dropDeletesMatcher(dropDeletes, start), dropDeletesMatcher(dropDeletes, end),
any(NoLimitThroughputController.class), any(User.class));
any(), any());
}

/** Verify arbitrary flush. */

@ -612,7 +609,7 @@ public class TestStripeCompactionPolicy {

private byte[] dropDeletesMatcher(Boolean dropDeletes, byte[] value) {
return dropDeletes == null ? any(byte[].class)
return dropDeletes == null ? any()
: (dropDeletes.booleanValue() ? aryEq(value) : isNull(byte[].class));
}

@ -780,7 +777,7 @@ public class TestStripeCompactionPolicy {
when(store.getColumnFamilyDescriptor()).thenReturn(col);
when(store.getRegionInfo()).thenReturn(info);
when(
store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
store.createWriterInTmp(anyLong(), any(), anyBoolean(),
anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);

Configuration conf = HBaseConfiguration.create();

@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScanInfo;

@ -201,7 +200,7 @@ public class TestStripeCompactor {
when(store.areWritesEnabled()).thenReturn(true);
when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
when(store.createWriterInTmp(anyLong(), any(), anyBoolean(),
anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);

@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;

@ -997,7 +998,7 @@ public abstract class AbstractTestWALReplay {
}
return b;
}
}).when(spyIn).read(any(byte[].class), any(int.class), any(int.class));
}).when(spyIn).read(any(byte[].class), anyInt(), anyInt());
doAnswer(new Answer<Void>() {

@Override

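As in the hunk above, matchers that used to be imported from org.mockito.Matchers move to org.mockito.ArgumentMatchers under mockito-core 2.x; the old Matchers class still exists but is deprecated. A tiny illustrative sketch of the new import location, not tied to any HBase file:

    import static org.mockito.ArgumentMatchers.anyInt; // was org.mockito.Matchers.anyInt in Mockito 1
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.List;

    class ImportMigrationSketch {
      public static void main(String[] args) {
        @SuppressWarnings("unchecked")
        List<String> list = mock(List.class);
        when(list.get(anyInt())).thenReturn("stubbed");
        System.out.println(list.get(3)); // prints "stubbed"
      }
    }
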
@ -45,12 +45,16 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;

import static org.junit.Assert.assertNotEquals;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Matchers.argThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@RunWith(MockitoJUnitRunner.class)
@Category({MediumTests.class})

@ -151,14 +155,14 @@ public class TestCanaryTool {
// One table's timeout is set for 0 ms and thus, should lead to an error.
verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
@Override
public boolean matches(Object argument) {
public boolean matches(LoggingEvent argument) {
return ((LoggingEvent) argument).getRenderedMessage().contains("exceeded the configured read timeout.");
}
}));
verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
@Override
public boolean matches(Object argument) {
return ((LoggingEvent) argument).getRenderedMessage().contains("The configured read timeout was");
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("The configured read timeout was");
}
}));
}

@ -173,10 +177,11 @@ public class TestCanaryTool {
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender, times(1)).doAppend(argThat(
new ArgumentMatcher<LoggingEvent>() {
@Override
public boolean matches(Object argument) {
return ((LoggingEvent) argument).getRenderedMessage().contains("The configured write timeout was");
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("The configured write timeout was");
}
}));
}

@ -187,8 +192,8 @@ public class TestCanaryTool {
runRegionserverCanary();
verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
@Override
public boolean matches(Object argument) {
return ((LoggingEvent) argument).getRenderedMessage().contains("Regionserver not serving any regions");
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Regionserver not serving any regions");
}
}));
}

@ -201,8 +206,8 @@ public class TestCanaryTool {
runRegionserverCanary();
verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
@Override
public boolean matches(Object argument) {
return ((LoggingEvent) argument).getRenderedMessage().contains("Regionserver not serving any regions");
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Regionserver not serving any regions");
}
}));
}

@ -374,7 +374,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
.when(
hri.bulkLoadHFile((RpcController) Mockito.any(), (BulkLoadHFileRequest) Mockito.any()))
.thenThrow(new ServiceException(new IOException("injecting bulk load error")));
Mockito.when(c.getClient(Mockito.any(ServerName.class))).thenReturn(hri);
Mockito.when(c.getClient(Mockito.any())).thenReturn(hri);
return c;
}

@ -1008,7 +1008,7 @@ public class TestWALSplit {
Thread.currentThread().interrupt();
}
}
Entry entry = (Entry) invocation.getArguments()[0];
Entry entry = (Entry) invocation.getArgument(0);
WALEdit edit = entry.getEdit();
List<Cell> cells = edit.getCells();
assertEquals(1, cells.size());

@ -63,7 +63,7 @@
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<artifactId>mockito-core</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>

pom.xml

@ -1380,7 +1380,7 @@
<hamcrest.version>1.3</hamcrest.version>
<htrace.version>3.2.0-incubating</htrace.version>
<log4j.version>1.2.17</log4j.version>
<mockito-all.version>1.10.19</mockito-all.version>
<mockito-core.version>2.1.0</mockito-core.version>
<!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
<external.protobuf.version>2.5.0</external.protobuf.version>
<protobuf.plugin.version>0.5.0</protobuf.plugin.version>

@ -1978,8 +1978,8 @@
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
<scope>test</scope>
</dependency>
<dependency>

@ -3331,8 +3331,8 @@
<additionalDependencies>
<additionalDependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
</additionalDependency>
<additionalDependency>
<groupId>org.hamcrest</groupId>

@ -3374,8 +3374,8 @@
<additionalDependencies>
<additionalDependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
</additionalDependency>
<additionalDependency>
<groupId>org.hamcrest</groupId>

@ -3428,8 +3428,8 @@
<additionalDependencies>
<additionalDependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
</additionalDependency>
<additionalDependency>
<groupId>org.hamcrest</groupId>

@ -3481,8 +3481,8 @@
<additionalDependencies>
<additionalDependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<artifactId>mockito-core</artifactId>
<version>${mockito-core.version}</version>
</additionalDependency>
<additionalDependency>
<groupId>org.hamcrest</groupId>

@ -117,8 +117,8 @@ First, add a dependency for Mockito to your Maven POM file.

<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.9.5</version>
<artifactId>mockito-core</artifactId>
<version>2.1.0</version>
<scope>test</scope>
</dependency>
----

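With the mockito-core dependency in place, test code keeps using the same org.mockito.Mockito entry points as before. A minimal JUnit 4-style sketch of the documented setup, with illustrative class names rather than anything from the HBase codebase:

    import static org.mockito.ArgumentMatchers.anyString;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.when;

    import org.junit.Test;

    public class GreeterTest {
      interface Greeter { String greet(String name); }

      @Test
      public void greetsByName() {
        Greeter greeter = mock(Greeter.class);
        when(greeter.greet(anyString())).thenReturn("hello");
        org.junit.Assert.assertEquals("hello", greeter.greet("hbase"));
        verify(greeter).greet("hbase");
      }
    }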