HBASE-15744 Port over small format/text improvements from HBASE-13784

(Jurriaan Mous)

Signed-off-by: stack <stack@apache.org>
Jurriaan Mous, 2016-05-01 13:33:00 +02:00 (committed by stack)
parent ccd8888b4b
commit 15631a76f5
4 changed files with 22 additions and 28 deletions

RpcRetryingCaller.java

@@ -43,7 +43,7 @@ public interface RpcRetryingCaller<T> {
   /**
    * Call the server once only.
-   * {@link RetryingCallable} has a strange shape so we can do retrys. Use this invocation if you
+   * {@link RetryingCallable} has a strange shape so we can do retries. Use this invocation if you
    * want to do a single call only (A call to {@link RetryingCallable#call(int)} will not likely
    * succeed).
    * @return an object of type T

RpcRetryingCallerWithReadReplicas.java

@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -16,8 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.client;
@@ -297,11 +294,7 @@ public class RpcRetryingCallerWithReadReplicas {
       } else {
         rl = cConnection.locateRegion(tableName, row, useCache, true, replicaId);
       }
-    } catch (DoNotRetryIOException e) {
-      throw e;
-    } catch (RetriesExhaustedException e) {
-      throw e;
-    } catch (InterruptedIOException e) {
+    } catch (DoNotRetryIOException | InterruptedIOException | RetriesExhaustedException e) {
       throw e;
     } catch (IOException e) {
       throw new RetriesExhaustedException("Can't get the location", e);
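
The hunk above folds three catch blocks that each rethrow unchanged into a single Java 7 multi-catch. A minimal, self-contained sketch of the same pattern, using JDK exception types in place of the HBase-specific DoNotRetryIOException and RetriesExhaustedException (which are not reproduced here):

// Exceptions that should reach the caller untouched share one multi-catch and
// are rethrown as-is; any other IOException is wrapped with a clearer message.
// FileNotFoundException merely stands in for the HBase-specific types.
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;

public class MultiCatchSketch {
  static void locate() throws IOException {
    try {
      doLookup();
    } catch (FileNotFoundException | InterruptedIOException e) {
      throw e; // precise rethrow: the compiler still tracks the exact types
    } catch (IOException e) {
      throw new IOException("Can't get the location", e);
    }
  }

  static void doLookup() throws IOException {
    // Placeholder for the real region lookup.
  }
}

Because the rethrow in the multi-catch branch is checked precisely against the listed types, collapsing the blocks does not widen what the surrounding method is declared to throw.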

RpcClient.java

@@ -29,29 +29,29 @@ import java.io.IOException;
  * Interface for RpcClient implementations so ConnectionManager can handle it.
  */
 @InterfaceAudience.Private public interface RpcClient extends Closeable {
-  public final static String FAILED_SERVER_EXPIRY_KEY = "hbase.ipc.client.failed.servers.expiry";
-  public final static int FAILED_SERVER_EXPIRY_DEFAULT = 2000;
-  public final static String IDLE_TIME = "hbase.ipc.client.connection.minIdleTimeBeforeClose";
-  public static final String IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY =
+  String FAILED_SERVER_EXPIRY_KEY = "hbase.ipc.client.failed.servers.expiry";
+  int FAILED_SERVER_EXPIRY_DEFAULT = 2000;
+  String IDLE_TIME = "hbase.ipc.client.connection.minIdleTimeBeforeClose";
+  String IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY =
       "hbase.ipc.client.fallback-to-simple-auth-allowed";
-  public static final boolean IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;
-  public static final String SPECIFIC_WRITE_THREAD = "hbase.ipc.client.specificThreadForWriting";
-  public static final String DEFAULT_CODEC_CLASS = "hbase.client.default.rpc.codec";
-  public final static String SOCKET_TIMEOUT_CONNECT = "hbase.ipc.client.socket.timeout.connect";
+  boolean IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;
+  String SPECIFIC_WRITE_THREAD = "hbase.ipc.client.specificThreadForWriting";
+  String DEFAULT_CODEC_CLASS = "hbase.client.default.rpc.codec";
+  String SOCKET_TIMEOUT_CONNECT = "hbase.ipc.client.socket.timeout.connect";
   /**
    * How long we wait when we wait for an answer. It's not the operation time, it's the time
    * we wait when we start to receive an answer, when the remote write starts to send the data.
    */
-  public final static String SOCKET_TIMEOUT_READ = "hbase.ipc.client.socket.timeout.read";
-  public final static String SOCKET_TIMEOUT_WRITE = "hbase.ipc.client.socket.timeout.write";
-  public final static int DEFAULT_SOCKET_TIMEOUT_CONNECT = 10000; // 10 seconds
-  public final static int DEFAULT_SOCKET_TIMEOUT_READ = 20000; // 20 seconds
-  public final static int DEFAULT_SOCKET_TIMEOUT_WRITE = 60000; // 60 seconds
+  String SOCKET_TIMEOUT_READ = "hbase.ipc.client.socket.timeout.read";
+  String SOCKET_TIMEOUT_WRITE = "hbase.ipc.client.socket.timeout.write";
+  int DEFAULT_SOCKET_TIMEOUT_CONNECT = 10000; // 10 seconds
+  int DEFAULT_SOCKET_TIMEOUT_READ = 20000; // 20 seconds
+  int DEFAULT_SOCKET_TIMEOUT_WRITE = 60000; // 60 seconds
   // Used by the server, for compatibility with old clients.
   // The client in 0.99+ does not ping the server.
-  final static int PING_CALL_ID = -1;
+  int PING_CALL_ID = -1;
   /**
    * Creates a "channel" that can be used by a blocking protobuf service. Useful setting up
@@ -64,8 +64,8 @@ import java.io.IOException;
    * @return A blocking rpc channel that goes via this rpc client instance.
    * @throws IOException when channel could not be created
    */
-  public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user,
-      int rpcTimeout) throws IOException;
+  BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout)
+      throws IOException;
@@ -76,13 +76,14 @@ import java.io.IOException;
    * safe exception.
    * @param sn server location to cancel connections of
    */
-  public void cancelConnections(ServerName sn);
+  void cancelConnections(ServerName sn);
   /**
    * Stop all threads related to this client. No further calls may be made
    * using this client.
    */
-  @Override public void close();
+  @Override
+  void close();
   /**
    * @return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
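
The RpcClient hunks above all lean on one Java rule: every field declared in an interface is implicitly public static final, and every method is implicitly public (and abstract when it has no body), so spelling those modifiers out is redundant. A minimal sketch with hypothetical interface names, not taken from HBase:

// Both interfaces below declare exactly the same members, because interface
// fields are implicitly public static final and interface methods implicitly
// public. Names are illustrative only.
import java.io.Closeable;

interface VerboseClient extends Closeable {
  public static final int DEFAULT_TIMEOUT = 2000;
  public void cancel(String server);
  @Override public void close();
}

interface ConciseClient extends Closeable {
  int DEFAULT_TIMEOUT = 2000;   // still public static final
  void cancel(String server);   // still public abstract
  @Override
  void close();                 // still overrides Closeable#close
}

Dropping the modifiers changes nothing for implementers or callers; it only removes noise, which is all the RpcClient portion of this commit does.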

TestFromClientSide.java

@@ -5430,7 +5430,7 @@ public class TestFromClientSide {
       table.put(put);
     }
-    // nomal scan
+    // normal scan
     ResultScanner scanner = table.getScanner(new Scan());
     int count = 0;
     for (Result r : scanner) {