HBASE-26027 The calling of HTable.batch blocked at AsyncRequestFutureImpl.waitUntilDone caused by ArrayStoreException (#3925)
* HBASE-26027 The calling of HTable.batch blocked at AsyncRequestFutureImpl.waitUntilDone caused by ArrayStoreException
commit 26e37bcbfe
parent b1bc5f3a5c
AsyncRequestFutureImpl.java:

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -1132,7 +1133,17 @@ class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
   @Override
   public void waitUntilDone() throws InterruptedIOException {
     try {
-      waitUntilDone(Long.MAX_VALUE);
+      if (this.operationTimeout > 0) {
+        // the worker thread maybe over by some exception without decrement the actionsInProgress,
+        // then the guarantee of operationTimeout will be broken, so we should set cutoff to avoid
+        // stuck here forever
+        long cutoff = (EnvironmentEdgeManager.currentTime() + this.operationTimeout) * 1000L;
+        if (!waitUntilDone(cutoff)) {
+          throw new SocketTimeoutException("time out before the actionsInProgress changed to zero");
+        }
+      } else {
+        waitUntilDone(Long.MAX_VALUE);
+      }
     } catch (InterruptedException iex) {
       throw new InterruptedIOException(iex.getMessage());
     } finally {
@@ -1144,7 +1155,7 @@ class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
     }
   }
 
-  private boolean waitUntilDone(long cutoff) throws InterruptedException{
+  private boolean waitUntilDone(long cutoff) throws InterruptedException {
     boolean hasWait = cutoff != Long.MAX_VALUE;
     long lastLog = EnvironmentEdgeManager.currentTime();
     long currentInProgress;
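Taken together, the hunks above bound the wait in waitUntilDone(): when the operation timeout is positive, the caller waits at most that long for actionsInProgress to reach zero and then fails with a SocketTimeoutException, so a worker thread that dies without decrementing the counter (for example via the ArrayStoreException named in the issue title) can no longer hang HTable.batch forever. The following standalone sketch illustrates the same bounded-wait pattern outside HBase; the class and helper names are hypothetical, the counter and timeout names merely mirror the diff, and System.currentTimeMillis() stands in for EnvironmentEdgeManager.currentTime().

import java.io.InterruptedIOException;
import java.net.SocketTimeoutException;
import java.util.concurrent.atomic.AtomicLong;

/** Standalone sketch of the bounded-wait pattern above; not the actual HBase class. */
public class BoundedWaitSketch {

  private final AtomicLong actionsInProgress = new AtomicLong();
  private final long operationTimeout; // milliseconds; 0 means "wait forever", as before the fix

  public BoundedWaitSketch(long operationTimeoutMs) {
    this.operationTimeout = operationTimeoutMs;
  }

  public void actionSubmitted() {
    actionsInProgress.incrementAndGet();
  }

  /** Workers call this when an action finishes, successfully or not. */
  public void actionCompleted() {
    actionsInProgress.decrementAndGet();
    synchronized (actionsInProgress) {
      actionsInProgress.notifyAll();
    }
  }

  /** Mirrors the patched public waitUntilDone(): bounded when an operation timeout is configured. */
  public void waitUntilDone() throws InterruptedIOException, SocketTimeoutException {
    try {
      if (operationTimeout > 0) {
        // Same cutoff convention as the diff: (now + timeout) in millis, scaled to micros.
        long cutoff = (System.currentTimeMillis() + operationTimeout) * 1000L;
        if (!waitUntilDone(cutoff)) {
          throw new SocketTimeoutException("time out before the actionsInProgress changed to zero");
        }
      } else {
        waitUntilDone(Long.MAX_VALUE);
      }
    } catch (InterruptedException iex) {
      throw new InterruptedIOException(iex.getMessage());
    }
  }

  /** Returns false instead of blocking forever once the cutoff (in micros) has passed. */
  private boolean waitUntilDone(long cutoff) throws InterruptedException {
    boolean hasWait = cutoff != Long.MAX_VALUE;
    while (actionsInProgress.get() != 0) {
      if (hasWait && System.currentTimeMillis() * 1000L > cutoff) {
        // A worker that died without decrementing the counter can no longer hang us here.
        return false;
      }
      synchronized (actionsInProgress) {
        if (actionsInProgress.get() == 0) {
          break;
        }
        actionsInProgress.wait(10); // woken earlier by actionCompleted(), re-checks the deadline
      }
    }
    return true;
  }
}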
TestClientOperationTimeout.java:

@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RetriesExhaustedException;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -158,7 +157,7 @@ public class TestClientOperationTimeout {
   }
 
   /**
-   * Tests that a batch mutate on a table throws {@link RetriesExhaustedException} when the
+   * Tests that a batch mutate on a table throws {@link SocketTimeoutException} when the
    * operation takes longer than 'hbase.client.operation.timeout'.
    */
   @Test
@@ -175,7 +174,7 @@ public class TestClientOperationTimeout {
     try {
       TABLE.batch(puts, new Object[2]);
       Assert.fail("should not reach here");
     } catch (Exception e) {
-      Assert.assertTrue(e instanceof RetriesExhaustedWithDetailsException);
+      Assert.assertTrue(e instanceof SocketTimeoutException);
     }
   }
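For application code, the visible effect of the fix is the one exercised by the updated test: a batch whose work never completes within hbase.client.operation.timeout now surfaces as a SocketTimeoutException instead of blocking indefinitely. The sketch below shows one way a caller might distinguish that case from ordinary retry exhaustion; the table handle, row, family, and qualifier names are placeholders, and obtaining the Table from a Connection is omitted.

import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchTimeoutHandling {

  // 'table' is assumed to come from a Connection configured with a positive
  // hbase.client.operation.timeout; row, family, and qualifier names are placeholders.
  static void putBatch(Table table) throws Exception {
    List<Put> puts = new ArrayList<>();
    puts.add(new Put(Bytes.toBytes("row1"))
      .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));
    try {
      table.batch(puts, new Object[puts.size()]);
    } catch (SocketTimeoutException e) {
      // With this fix, a batch whose in-progress counter never reaches zero surfaces here
      // once the operation timeout elapses, instead of blocking in waitUntilDone().
      System.err.println("batch timed out: " + e.getMessage());
    } catch (RetriesExhaustedWithDetailsException e) {
      // Ordinary per-action failures that exhaust their retries are still reported this way.
      System.err.println("batch failed after retries: " + e.getMessage());
    }
  }
}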