HBASE-19758 Split TestHCM to several smaller tests
Signed-off-by: Michael Stack <stack@apache.org>
parent 70515f5311
commit e0eea94c98
AbstractTestCIOperationTimeout.java
@@ -0,0 +1,83 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.fail;

import java.io.IOException;
import java.net.SocketTimeoutException;
import org.apache.hadoop.hbase.TableName;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for testing operation timeout logic for {@link ConnectionImplementation}.
 */
public abstract class AbstractTestCIOperationTimeout extends AbstractTestCITimeout {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractTestCIOperationTimeout.class);

  private TableName tableName;

  @Before
  public void setUp() throws IOException {
    tableName = TableName.valueOf(name.getMethodName());
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
        .addCoprocessor(SleepAndFailFirstTime.class.getName())
        .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build();
    TEST_UTIL.getAdmin().createTable(htd);
  }

  protected abstract void execute(Table table) throws IOException;

  /**
   * Test that an operation can fail when we reach the global operation timeout, even if each
   * individual RPC timeout is fine. We do that with:
   * <ul>
   * <li>client side: an operation timeout of 30 seconds</li>
   * <li>server side: we sleep 20 seconds at each attempt. The first attempt fails, the second one
   * succeeds. But the client won't wait that long: because 20 + 20 > 30, the client has already
   * timed out by the time the server answers.</li>
   * </ul>
   */
  @Test
  public void testOperationTimeout() throws IOException {
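    // Make the per-call RPC timeouts effectively unlimited so that only the operation timeout
    // can trigger the failure in this test.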
    TableBuilder builder =
      TEST_UTIL.getConnection().getTableBuilder(tableName, null).setRpcTimeout(Integer.MAX_VALUE)
        .setReadRpcTimeout(Integer.MAX_VALUE).setWriteRpcTimeout(Integer.MAX_VALUE);
    // Check that it works if the timeout is big enough.
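    // Reset the coprocessor's call counter so that its first call fails and sleeps again.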
    SleepAndFailFirstTime.ct.set(0);
    try (Table table = builder.setOperationTimeout(120 * 1000).build()) {
      execute(table);
    }
    // Reset and retry. This time it will fail: there is not enough time left for the second try.
    SleepAndFailFirstTime.ct.set(0);
    try (Table table = builder.setOperationTimeout(30 * 1000).build()) {
      SleepAndFailFirstTime.ct.set(0);
      execute(table);
      fail("We expect an exception here");
    } catch (SocketTimeoutException | RetriesExhaustedWithDetailsException e) {
      // The client has a CallTimeout class, but it's not shared. We're not very clean today;
      // in the general case you can expect the call to stop, but the exception may vary.
      // In this test, however, we're sure that it will be a socket timeout.
      LOG.info("We received an exception, as expected", e);
    }
  }
}
AbstractTestCIRpcTimeout.java
@@ -0,0 +1,79 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.fail;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for testing RPC timeout logic for {@link ConnectionImplementation}.
 */
public abstract class AbstractTestCIRpcTimeout extends AbstractTestCITimeout {

  private static final Logger LOG = LoggerFactory.getLogger(AbstractTestCIRpcTimeout.class);

  private TableName tableName;

  @Before
  public void setUp() throws IOException {
    tableName = TableName.valueOf(name.getMethodName());
    TableDescriptor htd =
      TableDescriptorBuilder.newBuilder(tableName).addCoprocessor(SleepCoprocessor.class.getName())
        .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM)).build();
    TEST_UTIL.getAdmin().createTable(htd);
  }

  protected abstract void execute(Table table) throws IOException;

  @Test
  public void testRpcTimeout() throws IOException {
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
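    // Per-table override: each RPC is allowed only half the coprocessor sleep time, while the
    // operation timeout is large, so it is the RPC timeout that fires and retries get exhausted.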
    try (Table table = TEST_UTIL.getConnection().getTableBuilder(tableName, null)
        .setRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2)
        .setReadRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2)
        .setWriteRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2)
        .setOperationTimeout(SleepCoprocessor.SLEEP_TIME * 100).build()) {
      execute(table);
      fail("The operation should not have succeeded");
    } catch (RetriesExhaustedException e) {
      LOG.info("We received an exception, as expected", e);
    }

    // Again, with a configuration-based override.
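    // These keys become the defaults for any Connection created from this Configuration, instead
    // of the per-table builder overrides used above.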
    c.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, SleepCoprocessor.SLEEP_TIME / 2);
    c.setInt(HConstants.HBASE_RPC_READ_TIMEOUT_KEY, SleepCoprocessor.SLEEP_TIME / 2);
    c.setInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, SleepCoprocessor.SLEEP_TIME / 2);
    c.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, SleepCoprocessor.SLEEP_TIME * 100);
    try (Connection conn = ConnectionFactory.createConnection(c)) {
      try (Table table = conn.getTable(tableName)) {
        execute(table);
        fail("The operation should not have succeeded");
      } catch (RetriesExhaustedException e) {
        LOG.info("We received an exception, as expected", e);
      }
    }
  }
}
AbstractTestCITimeout.java
@@ -0,0 +1,164 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TestName;

/**
 * Base class for testing timeout logic for {@link ConnectionImplementation}.
 */
public abstract class AbstractTestCITimeout {

  protected static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  protected static final byte[] FAM_NAM = Bytes.toBytes("f");

  @Rule
  public final TestName name = new TestName();

  /**
   * This coprocessor sleeps for 20 seconds (by default) on every call. The first call fails;
   * from the second call on, it works.
   */
  public static class SleepAndFailFirstTime implements RegionCoprocessor, RegionObserver {
    static final AtomicLong ct = new AtomicLong(0);
    static final String SLEEP_TIME_CONF_KEY = "hbase.coprocessor.SleepAndFailFirstTime.sleepTime";
    static final long DEFAULT_SLEEP_TIME = 20000;
    static final AtomicLong sleepTime = new AtomicLong(DEFAULT_SLEEP_TIME);

    public SleepAndFailFirstTime() {
    }

    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }
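    // Allow tests to shorten the sleep: pick up SLEEP_TIME_CONF_KEY from the table's coprocessor
    // configuration once, when the region opens.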
    @Override
    public void postOpen(ObserverContext<RegionCoprocessorEnvironment> c) {
      RegionCoprocessorEnvironment env = c.getEnvironment();
      Configuration conf = env.getConfiguration();
      sleepTime.set(conf.getLong(SLEEP_TIME_CONF_KEY, DEFAULT_SLEEP_TIME));
    }

    @Override
    public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
        final List<Cell> results) throws IOException {
      Threads.sleep(sleepTime.get());
      if (ct.incrementAndGet() == 1) {
        throw new IOException("first call I fail");
      }
    }

    @Override
    public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e, final Put put,
        final WALEdit edit, final Durability durability) throws IOException {
      Threads.sleep(sleepTime.get());
      if (ct.incrementAndGet() == 1) {
        throw new IOException("first call I fail");
      }
    }

    @Override
    public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Delete delete, final WALEdit edit, final Durability durability) throws IOException {
      Threads.sleep(sleepTime.get());
      if (ct.incrementAndGet() == 1) {
        throw new IOException("first call I fail");
      }
    }

    @Override
    public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Increment increment) throws IOException {
      Threads.sleep(sleepTime.get());
      if (ct.incrementAndGet() == 1) {
        throw new IOException("first call I fail");
      }
      return null;
    }
  }
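  /**
   * Sleeps for {@link #SLEEP_TIME} ms on every get, put, increment and delete, but never fails.
   */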
  public static class SleepCoprocessor implements RegionCoprocessor, RegionObserver {
    public static final int SLEEP_TIME = 5000;

    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }

    @Override
    public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
        final List<Cell> results) throws IOException {
      Threads.sleep(SLEEP_TIME);
    }

    @Override
    public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e, final Put put,
        final WALEdit edit, final Durability durability) throws IOException {
      Threads.sleep(SLEEP_TIME);
    }

    @Override
    public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Increment increment) throws IOException {
      Threads.sleep(SLEEP_TIME);
      return null;
    }

    @Override
    public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Delete delete, final WALEdit edit, final Durability durability) throws IOException {
      Threads.sleep(SLEEP_TIME);
    }
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().setBoolean(HConstants.STATUS_PUBLISHED, true);
    // Up the handlers; this test needs more than usual.
    TEST_UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    TEST_UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 3);
    TEST_UTIL.startMiniCluster(2);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
}
TestCIBadHostname.java
@@ -0,0 +1,93 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.fail;

import java.net.UnknownHostException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests that we fail fast when hostname resolution is not working and do not cache
 * unresolved InetSocketAddresses.
 */
@Category({ MediumTests.class, ClientTests.class })
public class TestCIBadHostname {

  private static HBaseTestingUtility testUtil;
  private static ConnectionImplementation conn;

  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    testUtil = HBaseTestingUtility.createLocalHTU();
    testUtil.startMiniCluster();
    conn = (ConnectionImplementation) testUtil.getConnection();
  }

  @AfterClass
  public static void teardownAfterClass() throws Exception {
    conn.close();
    testUtil.shutdownMiniCluster();
  }

  @Test(expected = UnknownHostException.class)
  public void testGetAdminBadHostname() throws Exception {
    // Verify that we can get an admin instance using the cluster master's hostname.
    ServerName master = testUtil.getHBaseCluster().getMaster().getServerName();
    try {
      conn.getAdmin(master);
    } catch (UnknownHostException uhe) {
      fail("Obtaining admin to the cluster master should have succeeded");
    }

    // Test that we fail to get an admin to an unresolvable hostname, which
    // means it won't be cached.
    ServerName badHost =
        ServerName.valueOf("unknownhost.invalid:" + HConstants.DEFAULT_MASTER_PORT,
          System.currentTimeMillis());
    conn.getAdmin(badHost);
    fail("Obtaining admin to unresolvable hostname should have failed");
  }

  @Test(expected = UnknownHostException.class)
  public void testGetClientBadHostname() throws Exception {
    // Verify that we can get a client instance using a cluster regionserver's hostname.
    ServerName rs = testUtil.getHBaseCluster().getRegionServer(0).getServerName();
    try {
      conn.getClient(rs);
    } catch (UnknownHostException uhe) {
      fail("Obtaining client to the cluster regionserver should have succeeded");
    }

    // Test that we fail to get a client to an unresolvable hostname, which
    // means it won't be cached.
    ServerName badHost =
        ServerName.valueOf("unknownhost.invalid:" + HConstants.DEFAULT_REGIONSERVER_PORT,
          System.currentTimeMillis());
    conn.getClient(badHost);
    fail("Obtaining client to unresolvable hostname should have failed");
  }
}
TestCIDeleteOperationTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, LargeTests.class })
public class TestCIDeleteOperationTimeout extends AbstractTestCIOperationTimeout {

  @Override
  protected void execute(Table table) throws IOException {
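    // The family-name bytes also serve as the row key here; any row works, since the sleeping
    // coprocessor intercepts the call before the row matters.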
    table.delete(new Delete(FAM_NAM));
  }
}
TestCIDeleteRpcTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, MediumTests.class })
public class TestCIDeleteRpcTimeout extends AbstractTestCIRpcTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.delete(new Delete(FAM_NAM));
  }
}
TestCIGetOperationTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, LargeTests.class })
public class TestCIGetOperationTimeout extends AbstractTestCIOperationTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.get(new Get(FAM_NAM));
  }
}
TestCIGetRpcTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, MediumTests.class })
public class TestCIGetRpcTimeout extends AbstractTestCIRpcTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.get(new Get(FAM_NAM));
  }
}
TestCIIncrementRpcTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, MediumTests.class })
public class TestCIIncrementRpcTimeout extends AbstractTestCIRpcTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.increment(new Increment(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, 1));
  }
}
TestCIPutOperationTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, LargeTests.class })
public class TestCIPutOperationTimeout extends AbstractTestCIOperationTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.put(new Put(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, FAM_NAM));
  }
}
TestCIPutRpcTimeout.java
@@ -0,0 +1,32 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.experimental.categories.Category;

@Category({ ClientTests.class, MediumTests.class })
public class TestCIPutRpcTimeout extends AbstractTestCIRpcTimeout {

  @Override
  protected void execute(Table table) throws IOException {
    table.put(new Put(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, FAM_NAM));
  }
}
TestCISleep.java
@@ -0,0 +1,137 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.net.SocketTimeoutException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ ClientTests.class, MediumTests.class })
public class TestCISleep extends AbstractTestCITimeout {

  private static final Logger LOG = LoggerFactory.getLogger(TestCISleep.class);

  private TableName tableName;

  @Before
  public void setUp() {
    tableName = TableName.valueOf(name.getMethodName());
  }

  /**
   * Test that the backoff time calculation in {@link RpcRetryingCaller} starts from retry index 0.
   */
  @Test
  public void testRpcRetryingCallerSleep() throws Exception {
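    // Coprocessor spec format is "<jar path>|<class>|<priority>|<key=value,...>"; path and
    // priority are left empty here, and the coprocessor sleep is shortened to 2 seconds.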
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
        .addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAM_NAM))
        .addCoprocessorWithSpec("|" + SleepAndFailFirstTime.class.getName() + "||" +
          SleepAndFailFirstTime.SLEEP_TIME_CONF_KEY + "=2000")
        .build();
    TEST_UTIL.getAdmin().createTable(htd);

    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    c.setInt(HConstants.HBASE_CLIENT_PAUSE, 3000);
    c.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 4000);

    try (Connection conn = ConnectionFactory.createConnection(c)) {
      SleepAndFailFirstTime.ct.set(0);
      try (Table table = conn.getTableBuilder(tableName, null).setOperationTimeout(8000).build()) {
        // Check that it works, because 2s + 3s * RETRY_BACKOFF[0] + 2s < 8s.
        table.get(new Get(FAM_NAM));
      }
      SleepAndFailFirstTime.ct.set(0);
      try (Table table = conn.getTableBuilder(tableName, null).setOperationTimeout(6000).build()) {
        // Will fail this time: after the first sleep and the pause there is not enough time left
        // for the second try, because 2s + 3s + 2s > 6s.
        table.get(new Get(FAM_NAM));
        fail("We expect an exception here");
      } catch (SocketTimeoutException e) {
        LOG.info("We received an exception, as expected", e);
      }
    }
  }

  @Test
  public void testCallableSleep() throws Exception {
    long pauseTime;
    long baseTime = 100;
    final TableName tableName = TableName.valueOf(name.getMethodName());
    TEST_UTIL.createTable(tableName, FAM_NAM);
    ClientServiceCallable<Object> regionServerCallable =
      new ClientServiceCallable<Object>(TEST_UTIL.getConnection(), tableName, FAM_NAM,
          new RpcControllerFactory(TEST_UTIL.getConfiguration()).newController(),
          HConstants.PRIORITY_UNSET) {
        @Override
        protected Object rpcCall() throws Exception {
          return null;
        }
      };

    regionServerCallable.prepare(false);
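    // sleep() should return baseTime scaled by RETRY_BACKOFF[attempt], with at most 1% of jitter.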
    for (int i = 0; i < HConstants.RETRY_BACKOFF.length; i++) {
      pauseTime = regionServerCallable.sleep(baseTime, i);
      assertTrue(pauseTime >= (baseTime * HConstants.RETRY_BACKOFF[i]));
      assertTrue(pauseTime <= (baseTime * HConstants.RETRY_BACKOFF[i] * 1.01f));
    }

    RegionAdminServiceCallable<Object> regionAdminServiceCallable =
      new RegionAdminServiceCallable<Object>((ClusterConnection) TEST_UTIL.getConnection(),
          new RpcControllerFactory(TEST_UTIL.getConfiguration()), tableName, FAM_NAM) {
        @Override
        public Object call(HBaseRpcController controller) throws Exception {
          return null;
        }
      };

    regionAdminServiceCallable.prepare(false);
    for (int i = 0; i < HConstants.RETRY_BACKOFF.length; i++) {
      pauseTime = regionAdminServiceCallable.sleep(baseTime, i);
      assertTrue(pauseTime >= (baseTime * HConstants.RETRY_BACKOFF[i]));
      assertTrue(pauseTime <= (baseTime * HConstants.RETRY_BACKOFF[i] * 1.01f));
    }

    try (
      MasterCallable<Object> masterCallable = new MasterCallable<Object>(TEST_UTIL.getConnection(),
          new RpcControllerFactory(TEST_UTIL.getConfiguration())) {
        @Override
        protected Object rpcCall() throws Exception {
          return null;
        }
      }) {
      for (int i = 0; i < HConstants.RETRY_BACKOFF.length; i++) {
        pauseTime = masterCallable.sleep(baseTime, i);
        assertTrue(pauseTime >= (baseTime * HConstants.RETRY_BACKOFF[i]));
        assertTrue(pauseTime <= (baseTime * HConstants.RETRY_BACKOFF[i] * 1.01f));
      }
    }
  }
}
Two additional file diffs suppressed because they are too large.