HBASE-16110 AsyncFS WAL doesn't work with Hadoop 2.8+

Signed-off-by: Sean Busbey <busbey@apache.org>
zhangduo 2016-07-12 11:15:08 +08:00 committed by Sean Busbey
parent a1cc2c4bfe
commit 515c499f95
3 changed files with 485 additions and 327 deletions
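All three files below deal with the incompatibility the same way: every HDFS-internal API that changed between Hadoop 2.4 and 2.8 is hidden behind a small adapter interface, the matching implementation is chosen once by reflection, and the selection runs in a static initializer that fails loudly when nothing matches. A minimal, illustrative sketch of that load-time probe-and-fallback pattern follows; the Hadoop class names (PBHelperClient from 2.8, PBHelper before that) are real, but HadoopVersionShim, ProtoConverter and createConverter are invented for the example and assume hadoop-hdfs is on the classpath.

import java.lang.reflect.Method;

/**
 * Illustrative only: the load-time reflection fallback this patch applies throughout.
 * The shim and interface names are invented for the example; the Hadoop classes are real.
 */
public final class HadoopVersionShim {

  /** Adapter that hides which Hadoop helper class actually does the conversion. */
  interface ProtoConverter {
    Object convert(Object extendedBlock) throws Exception;
  }

  static final ProtoConverter CONVERTER;

  static {
    try {
      CONVERTER = createConverter();
    } catch (Exception e) {
      // Same failure mode as the patch: fail hard at class load with a pointer to HBASE-16110.
      throw new Error("Couldn't properly initialize access to HDFS internals. Please "
          + "update your WAL Provider to not make use of the 'asyncfs' provider. See "
          + "HBASE-16110 for more information.", e);
    }
  }

  private static ProtoConverter createConverter() throws Exception {
    Class<?> helperClass;
    try {
      // Hadoop 2.8+ keeps the client-side proto conversions here.
      helperClass = Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelperClient");
    } catch (ClassNotFoundException e) {
      // Hadoop 2.7 and earlier keep them in PBHelper.
      helperClass = Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelper");
    }
    final Method convertMethod = helperClass.getMethod("convert",
        Class.forName("org.apache.hadoop.hdfs.protocol.ExtendedBlock"));
    return new ProtoConverter() {
      @Override
      public Object convert(Object extendedBlock) throws Exception {
        return convertMethod.invoke(null, extendedBlock);
      }
    };
  }

  private HadoopVersionShim() {
  }
}

The patch builds one such adapter per incompatible API (BlockAdder, PBHelper, ChecksumCreater, LeaseManager and so on), which is why the new code is dominated by getMethod/getDeclaredMethod and invoke pairs.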

org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.asyncfs;
 import static io.netty.handler.timeout.IdleState.READER_IDLE;
 import static io.netty.handler.timeout.IdleState.WRITER_IDLE;
 import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.HEART_BEAT_SEQNO;
+import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.READ_TIMEOUT;
 import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.completeFile;
 import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.endFileLease;
 import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.getStatus;
@@ -71,7 +72,6 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
 import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
-import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.util.DataChecksum;
 
 /**
@@ -339,7 +339,7 @@ public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
     this.alloc = alloc;
     this.buf = alloc.directBuffer();
     this.state = State.STREAMING;
-    setupReceiver(conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsServerConstants.READ_TIMEOUT));
+    setupReceiver(conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT));
   }
 
   @Override

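The error messages added in the two helper classes below tell operators what to do when the reflection probes fail on an unsupported Hadoop release: stop pointing the WAL at the 'asyncfs' provider. A hedged illustration of that workaround follows; the hbase.wal.provider key and the 'filesystem' value are the WALFactory settings in HBase rather than anything introduced by this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

// Hypothetical illustration of the workaround named in the new error messages: if the
// reflection probes fail on a given Hadoop version, stop using the 'asyncfs' WAL provider.
public class DisableAsyncFsWal {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Fall back to the classic FSHLog-based provider instead of AsyncFSWAL.
    conf.set("hbase.wal.provider", "filesystem");
    System.out.println("WAL provider: " + conf.get("hbase.wal.provider"));
  }
}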
org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java

@@ -99,15 +99,15 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
-import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
@@ -128,8 +128,10 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
   // copied from DFSPacket since it is package private.
   public static final long HEART_BEAT_SEQNO = -1L;
 
-  // helper class for creating DataChecksum object.
-  private static final Method CREATE_CHECKSUM;
+  // Timeouts for communicating with DataNode for streaming writes/reads
+  public static final int READ_TIMEOUT = 60 * 1000;
+  public static final int READ_TIMEOUT_EXTENSION = 5 * 1000;
+  public static final int WRITE_TIMEOUT = 8 * 60 * 1000;
 
   // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a
   // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may
@@ -161,6 +163,17 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static final FileCreater FILE_CREATER;
 
+  // helper class for calling add block method on namenode. There is a addBlockFlags parameter for
+  // hadoop 2.8 or later. See createBlockAdder for more details.
+  private interface BlockAdder {
+
+    LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,
+        ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId, String[] favoredNodes)
+        throws IOException;
+  }
+
+  private static final BlockAdder BLOCK_ADDER;
+
   // helper class for add or remove lease from DFSClient. Hadoop 2.4 use src as the Map's key, and
   // hadoop 2.5 or after use inodeId. See createLeaseManager for more details.
   private interface LeaseManager {
@@ -181,156 +194,182 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR; private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;
private static DFSClientAdaptor createDFSClientAdaptor() { // helper class for convert protos.
try { private interface PBHelper {
final Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");
isClientRunningMethod.setAccessible(true);
return new DFSClientAdaptor() {
@Override ExtendedBlockProto convert(final ExtendedBlock b);
public boolean isClientRunning(DFSClient client) {
try { TokenProto convert(Token<?> tok);
return (Boolean) isClientRunningMethod.invoke(client);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
} catch (NoSuchMethodException e) {
throw new Error(e);
}
} }
private static LeaseManager createLeaseManager() { private static final PBHelper PB_HELPER;
try {
final Method beginFileLeaseMethod =
DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);
beginFileLeaseMethod.setAccessible(true);
final Method endFileLeaseMethod =
DFSClient.class.getDeclaredMethod("endFileLease", long.class);
endFileLeaseMethod.setAccessible(true);
return new LeaseManager() {
@Override // helper class for creating data checksum.
public void begin(DFSClient client, String src, long inodeId) { private interface ChecksumCreater {
try { DataChecksum createChecksum(Object conf);
beginFileLeaseMethod.invoke(client, inodeId, null);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
public void end(DFSClient client, String src, long inodeId) {
try {
endFileLeaseMethod.invoke(client, inodeId);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
} catch (NoSuchMethodException e) {
LOG.warn("No inodeId related lease methods found, should be hadoop 2.4-", e);
}
try {
final Method beginFileLeaseMethod =
DFSClient.class.getDeclaredMethod("beginFileLease", String.class, DFSOutputStream.class);
beginFileLeaseMethod.setAccessible(true);
final Method endFileLeaseMethod =
DFSClient.class.getDeclaredMethod("endFileLease", String.class);
endFileLeaseMethod.setAccessible(true);
return new LeaseManager() {
@Override
public void begin(DFSClient client, String src, long inodeId) {
try {
beginFileLeaseMethod.invoke(client, src, null);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
public void end(DFSClient client, String src, long inodeId) {
try {
endFileLeaseMethod.invoke(client, src);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
} catch (NoSuchMethodException e) {
throw new Error(e);
}
} }
private static PipelineAckStatusGetter createPipelineAckStatusGetter() { private static final ChecksumCreater CHECKSUM_CREATER;
try {
final Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList"); private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {
@SuppressWarnings("rawtypes") final Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");
Class<? extends Enum> ecnClass; isClientRunningMethod.setAccessible(true);
try { return new DFSClientAdaptor() {
ecnClass =
Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN") @Override
.asSubclass(Enum.class); public boolean isClientRunning(DFSClient client) {
} catch (ClassNotFoundException e) { try {
throw new Error(e); return (Boolean) isClientRunningMethod.invoke(client);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
} }
@SuppressWarnings("unchecked") };
final Enum<?> disabledECN = Enum.valueOf(ecnClass, "DISABLED"); }
final Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);
final Method combineHeaderMethod =
PipelineAck.class.getMethod("combineHeader", ecnClass, Status.class);
final Method getStatusFromHeaderMethod =
PipelineAck.class.getMethod("getStatusFromHeader", int.class);
return new PipelineAckStatusGetter() {
@Override private static LeaseManager createLeaseManager25() throws NoSuchMethodException {
public Status get(PipelineAckProto ack) { final Method beginFileLeaseMethod = DFSClient.class.getDeclaredMethod("beginFileLease",
try { long.class, DFSOutputStream.class);
@SuppressWarnings("unchecked") beginFileLeaseMethod.setAccessible(true);
List<Integer> flagList = (List<Integer>) getFlagListMethod.invoke(ack); final Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);
Integer headerFlag; endFileLeaseMethod.setAccessible(true);
if (flagList.isEmpty()) { return new LeaseManager() {
Status reply = (Status) getReplyMethod.invoke(ack, 0);
headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply); @Override
} else { public void begin(DFSClient client, String src, long inodeId) {
headerFlag = flagList.get(0); try {
} beginFileLeaseMethod.invoke(client, inodeId, null);
return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag); } catch (IllegalAccessException | InvocationTargetException e) {
} catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e);
throw new RuntimeException(e);
}
} }
}; }
} catch (NoSuchMethodException e) {
LOG.warn("Can not get expected methods, should be hadoop 2.6-", e); @Override
} public void end(DFSClient client, String src, long inodeId) {
try {
endFileLeaseMethod.invoke(client, inodeId);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static LeaseManager createLeaseManager24() throws NoSuchMethodException {
final Method beginFileLeaseMethod = DFSClient.class.getDeclaredMethod("beginFileLease",
String.class, DFSOutputStream.class);
beginFileLeaseMethod.setAccessible(true);
final Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease",
String.class);
endFileLeaseMethod.setAccessible(true);
return new LeaseManager() {
@Override
public void begin(DFSClient client, String src, long inodeId) {
try {
beginFileLeaseMethod.invoke(client, src, null);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
public void end(DFSClient client, String src, long inodeId) {
try {
endFileLeaseMethod.invoke(client, src);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static LeaseManager createLeaseManager() throws NoSuchMethodException {
try { try {
final Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class); return createLeaseManager25();
return new PipelineAckStatusGetter() {
@Override
public Status get(PipelineAckProto ack) {
try {
return (Status) getStatusMethod.invoke(ack, 0);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
} catch (NoSuchMethodException e) { } catch (NoSuchMethodException e) {
throw new Error(e); LOG.debug("No inodeId related lease methods found, should be hadoop 2.4-", e);
} }
return createLeaseManager24();
}
private static PipelineAckStatusGetter createPipelineAckStatusGetter27()
throws NoSuchMethodException {
final Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList");
@SuppressWarnings("rawtypes")
Class<? extends Enum> ecnClass;
try {
ecnClass = Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN")
.asSubclass(Enum.class);
} catch (ClassNotFoundException e) {
final String msg = "Couldn't properly initialize the PipelineAck.ECN class. Please "
+ "update your WAL Provider to not make use of the 'asyncfs' provider. See "
+ "HBASE-16110 for more information.";
LOG.error(msg, e);
throw new Error(msg, e);
}
@SuppressWarnings("unchecked")
final Enum<?> disabledECN = Enum.valueOf(ecnClass, "DISABLED");
final Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);
final Method combineHeaderMethod = PipelineAck.class.getMethod("combineHeader", ecnClass,
Status.class);
final Method getStatusFromHeaderMethod = PipelineAck.class.getMethod("getStatusFromHeader",
int.class);
return new PipelineAckStatusGetter() {
@Override
public Status get(PipelineAckProto ack) {
try {
@SuppressWarnings("unchecked")
List<Integer> flagList = (List<Integer>) getFlagListMethod.invoke(ack);
Integer headerFlag;
if (flagList.isEmpty()) {
Status reply = (Status) getReplyMethod.invoke(ack, 0);
headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply);
} else {
headerFlag = flagList.get(0);
}
return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static PipelineAckStatusGetter createPipelineAckStatusGetter26()
throws NoSuchMethodException {
final Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class);
return new PipelineAckStatusGetter() {
@Override
public Status get(PipelineAckProto ack) {
try {
return (Status) getStatusMethod.invoke(ack, 0);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static PipelineAckStatusGetter createPipelineAckStatusGetter()
throws NoSuchMethodException {
try {
return createPipelineAckStatusGetter27();
} catch (NoSuchMethodException e) {
LOG.debug("Can not get expected methods, should be hadoop 2.6-", e);
}
return createPipelineAckStatusGetter26();
} }
private static StorageTypeSetter createStorageTypeSetter() { private static StorageTypeSetter createStorageTypeSetter() {
final Method setStorageTypeMethod; final Method setStorageTypeMethod;
try { try {
setStorageTypeMethod = setStorageTypeMethod = OpWriteBlockProto.Builder.class.getMethod("setStorageType",
OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class); StorageTypeProto.class);
} catch (NoSuchMethodException e) { } catch (NoSuchMethodException e) {
LOG.warn("noSetStorageType method found, should be hadoop 2.5-", e); LOG.debug("noSetStorageType method found, should be hadoop 2.5-", e);
return new StorageTypeSetter() { return new StorageTypeSetter() {
@Override @Override
@@ -359,7 +398,8 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     };
   }
 
-  private static FileCreater createFileCreater() {
+  private static FileCreater createFileCreater() throws ClassNotFoundException,
+      NoSuchMethodException, IllegalAccessException, InvocationTargetException {
     for (Method method : ClientProtocol.class.getMethods()) {
       if (method.getName().equals("create")) {
         final Method createMethod = method;
@@ -372,8 +412,8 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
             String clientName, EnumSetWritable<CreateFlag> flag, boolean createParent,
             short replication, long blockSize) throws IOException {
           try {
-            return (HdfsFileStatus) createMethod.invoke(namenode, src, masked, clientName,
-              flag, createParent, replication, blockSize);
+            return (HdfsFileStatus) createMethod.invoke(namenode, src, masked, clientName, flag,
+              createParent, replication, blockSize);
           } catch (IllegalAccessException e) {
             throw new RuntimeException(e);
           } catch (InvocationTargetException e) {
@@ -383,36 +423,159 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
} }
}; };
} else { } else {
try { Class<?> cryptoProtocolVersionClass = Class
Class<?> cryptoProtocolVersionClass = .forName("org.apache.hadoop.crypto.CryptoProtocolVersion");
Class.forName("org.apache.hadoop.crypto.CryptoProtocolVersion"); Method supportedMethod = cryptoProtocolVersionClass.getMethod("supported");
Method supportedMethod = cryptoProtocolVersionClass.getMethod("supported"); final Object supported = supportedMethod.invoke(null);
final Object supported = supportedMethod.invoke(null); return new FileCreater() {
return new FileCreater() {
@Override @Override
public HdfsFileStatus create(ClientProtocol namenode, String src, public HdfsFileStatus create(ClientProtocol namenode, String src, FsPermission masked,
FsPermission masked, String clientName, EnumSetWritable<CreateFlag> flag, String clientName, EnumSetWritable<CreateFlag> flag, boolean createParent,
boolean createParent, short replication, long blockSize) throws IOException { short replication, long blockSize) throws IOException {
try { try {
return (HdfsFileStatus) createMethod.invoke(namenode, src, masked, clientName, return (HdfsFileStatus) createMethod.invoke(namenode, src, masked, clientName, flag,
flag, createParent, replication, blockSize, supported); createParent, replication, blockSize, supported);
} catch (IllegalAccessException e) { } catch (IllegalAccessException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} catch (InvocationTargetException e) { } catch (InvocationTargetException e) {
Throwables.propagateIfPossible(e.getTargetException(), IOException.class); Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
throw new RuntimeException(e); throw new RuntimeException(e);
}
} }
}; }
} catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException };
| InvocationTargetException e) {
throw new Error(e);
}
} }
} }
} }
throw new Error("No create method found for " + ClientProtocol.class.getName()); throw new NoSuchMethodException("Can not find create method in ClientProtocol");
}
private static BlockAdder createBlockAdder() throws NoSuchMethodException {
for (Method method : ClientProtocol.class.getMethods()) {
if (method.getName().equals("addBlock")) {
final Method addBlockMethod = method;
Class<?>[] paramTypes = addBlockMethod.getParameterTypes();
if (paramTypes[paramTypes.length - 1] == String[].class) {
return new BlockAdder() {
@Override
public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,
ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,
String[] favoredNodes) throws IOException {
try {
return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,
excludeNodes, fileId, favoredNodes);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
throw new RuntimeException(e);
}
}
};
} else {
return new BlockAdder() {
@Override
public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,
ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,
String[] favoredNodes) throws IOException {
try {
return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,
excludeNodes, fileId, favoredNodes, null);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
throw new RuntimeException(e);
}
}
};
}
}
}
throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");
}
private static PBHelper createPBHelper() throws NoSuchMethodException {
Class<?> helperClass;
try {
helperClass = Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelperClient");
} catch (ClassNotFoundException e) {
LOG.debug("No PBHelperClient class found, should be hadoop 2.7-", e);
helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;
}
final Method convertEBMethod = helperClass.getMethod("convert", ExtendedBlock.class);
final Method convertTokenMethod = helperClass.getMethod("convert", Token.class);
return new PBHelper() {
@Override
public ExtendedBlockProto convert(ExtendedBlock b) {
try {
return (ExtendedBlockProto) convertEBMethod.invoke(null, b);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
public TokenProto convert(Token<?> tok) {
try {
return (TokenProto) convertTokenMethod.invoke(null, tok);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static ChecksumCreater createChecksumCreater28(Class<?> confClass)
throws NoSuchMethodException {
for (Method method : confClass.getMethods()) {
if (method.getName().equals("createChecksum")) {
final Method createChecksumMethod = method;
return new ChecksumCreater() {
@Override
public DataChecksum createChecksum(Object conf) {
try {
return (DataChecksum) createChecksumMethod.invoke(conf, (Object) null);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
}
throw new NoSuchMethodException("Can not find createChecksum method in DfsClientConf");
}
private static ChecksumCreater createChecksumCreater27(Class<?> confClass)
throws NoSuchMethodException {
final Method createChecksumMethod = confClass.getDeclaredMethod("createChecksum");
createChecksumMethod.setAccessible(true);
return new ChecksumCreater() {
@Override
public DataChecksum createChecksum(Object conf) {
try {
return (DataChecksum) createChecksumMethod.invoke(conf);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
}
private static ChecksumCreater createChecksumCreater()
throws NoSuchMethodException, ClassNotFoundException {
try {
return createChecksumCreater28(
Class.forName("org.apache.hadoop.hdfs.client.impl.DfsClientConf"));
} catch (ClassNotFoundException e) {
LOG.debug("No DfsClientConf class found, should be hadoop 2.7-", e);
}
return createChecksumCreater27(Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));
} }
// cancel the processing if DFSClient is already closed. // cancel the processing if DFSClient is already closed.
@@ -432,17 +595,21 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   static {
     try {
-      CREATE_CHECKSUM = DFSClient.Conf.class.getDeclaredMethod("createChecksum");
-      CREATE_CHECKSUM.setAccessible(true);
-    } catch (NoSuchMethodException e) {
-      throw new Error(e);
+      PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();
+      STORAGE_TYPE_SETTER = createStorageTypeSetter();
+      FILE_CREATER = createFileCreater();
+      BLOCK_ADDER = createBlockAdder();
+      LEASE_MANAGER = createLeaseManager();
+      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();
+      PB_HELPER = createPBHelper();
+      CHECKSUM_CREATER = createChecksumCreater();
+    } catch (Exception e) {
+      final String msg = "Couldn't properly initialize access to HDFS internals. Please "
+          + "update your WAL Provider to not make use of the 'asyncfs' provider. See "
+          + "HBASE-16110 for more information.";
+      LOG.error(msg, e);
+      throw new Error(msg, e);
     }
-    PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();
-    STORAGE_TYPE_SETTER = createStorageTypeSetter();
-    FILE_CREATER = createFileCreater();
-    LEASE_MANAGER = createLeaseManager();
-    DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();
   }
 
   static void beginFileLease(DFSClient client, String src, long inodeId) {
@@ -454,11 +621,7 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
   }
 
   static DataChecksum createChecksum(DFSClient client) {
-    try {
-      return (DataChecksum) CREATE_CHECKSUM.invoke(client.getConf());
-    } catch (IllegalAccessException | InvocationTargetException e) {
-      throw new RuntimeException(e);
-    }
+    return CHECKSUM_CREATER.createChecksum(client.getConf());
   }
 
   static Status getStatus(PipelineAckProto ack) {
@@ -530,8 +693,8 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
       OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {
     OpWriteBlockProto proto = STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();
     int protoLen = proto.getSerializedSize();
-    ByteBuf buffer =
-        channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);
+    ByteBuf buffer = channel.alloc()
+        .buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);
     buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
     buffer.writeByte(Op.WRITE_BLOCK.code);
     proto.writeDelimitedTo(new ByteBufOutputStream(buffer));
@@ -540,8 +703,8 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static void initialize(Configuration conf, final Channel channel,
       final DatanodeInfo dnInfo, final Enum<?> storageType,
-      final OpWriteBlockProto.Builder writeBlockProtoBuilder, final int timeoutMs,
-      DFSClient client, Token<BlockTokenIdentifier> accessToken, final Promise<Channel> promise) {
+      final OpWriteBlockProto.Builder writeBlockProtoBuilder, final int timeoutMs, DFSClient client,
+      Token<BlockTokenIdentifier> accessToken, final Promise<Channel> promise) {
     Promise<Void> saslPromise = channel.eventLoop().newPromise();
     trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);
     saslPromise.addListener(new FutureListener<Void>() {
@@ -560,32 +723,26 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
} }
private static List<Future<Channel>> connectToDataNodes(final Configuration conf, private static List<Future<Channel>> connectToDataNodes(final Configuration conf,
final DFSClient client, String clientName, final LocatedBlock locatedBlock, final DFSClient client, String clientName, final LocatedBlock locatedBlock, long maxBytesRcvd,
long maxBytesRcvd, long latestGS, BlockConstructionStage stage, DataChecksum summer, long latestGS, BlockConstructionStage stage, DataChecksum summer, EventLoop eventLoop) {
EventLoop eventLoop) {
Enum<?>[] storageTypes = locatedBlock.getStorageTypes(); Enum<?>[] storageTypes = locatedBlock.getStorageTypes();
DatanodeInfo[] datanodeInfos = locatedBlock.getLocations(); DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();
boolean connectToDnViaHostname = boolean connectToDnViaHostname = conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME,
conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT); DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);
final int timeoutMs = final int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);
conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsServerConstants.READ_TIMEOUT);
ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock()); ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());
blockCopy.setNumBytes(locatedBlock.getBlockSize()); blockCopy.setNumBytes(locatedBlock.getBlockSize());
ClientOperationHeaderProto header = ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()
ClientOperationHeaderProto .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))
.newBuilder() .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))
.setBaseHeader( .setClientName(clientName).build();
BaseHeaderProto.newBuilder().setBlock(PBHelper.convert(blockCopy))
.setToken(PBHelper.convert(locatedBlock.getBlockToken())))
.setClientName(clientName).build();
ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer); ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);
final OpWriteBlockProto.Builder writeBlockProtoBuilder = final OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()
OpWriteBlockProto.newBuilder().setHeader(header) .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))
.setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name())) .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())
.setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes()) .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)
.setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS) .setRequestedChecksum(checksumProto)
.setRequestedChecksum(checksumProto) .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());
.setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());
List<Future<Channel>> futureList = new ArrayList<>(datanodeInfos.length); List<Future<Channel>> futureList = new ArrayList<>(datanodeInfos.length);
for (int i = 0; i < datanodeInfos.length; i++) { for (int i = 0; i < datanodeInfos.length; i++) {
final DatanodeInfo dnInfo = datanodeInfos[i]; final DatanodeInfo dnInfo = datanodeInfos[i];
@@ -642,14 +799,11 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     ClientProtocol namenode = client.getNamenode();
     HdfsFileStatus stat;
     try {
-      stat =
-          FILE_CREATER.create(
-            namenode,
-            src,
-            FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)),
-            clientName,
-            new EnumSetWritable<CreateFlag>(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet
-                .of(CREATE)), createParent, replication, blockSize);
+      stat = FILE_CREATER.create(namenode, src,
+        FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,
+        new EnumSetWritable<CreateFlag>(
+            overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),
+        createParent, replication, blockSize);
     } catch (Exception e) {
       if (e instanceof RemoteException) {
         throw (RemoteException) e;
@@ -663,12 +817,11 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     List<Future<Channel>> futureList = null;
     try {
       DataChecksum summer = createChecksum(client);
-      locatedBlock =
-          namenode.addBlock(src, client.getClientName(), null, null, stat.getFileId(), null);
+      locatedBlock = BLOCK_ADDER.addBlock(namenode, src, client.getClientName(), null, null,
+        stat.getFileId(), null);
       List<Channel> datanodeList = new ArrayList<>();
-      futureList =
-          connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L, PIPELINE_SETUP_CREATE,
-            summer, eventLoop);
+      futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,
+        PIPELINE_SETUP_CREATE, summer, eventLoop);
       for (Future<Channel> future : futureList) {
         // fail the creation if there are connection failures since we are fail-fast. The upper
         // layer should retry itself if needed.
@@ -712,8 +865,8 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
     return new FileSystemLinkResolver<FanOutOneBlockAsyncDFSOutput>() {
 
       @Override
-      public FanOutOneBlockAsyncDFSOutput doCall(Path p) throws IOException,
-          UnresolvedLinkException {
+      public FanOutOneBlockAsyncDFSOutput doCall(Path p)
+          throws IOException, UnresolvedLinkException {
         return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,
           blockSize, eventLoop);
       }

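The Hadoop 2.8 incompatibility named in the commit title is most visible in createBlockAdder() above: ClientProtocol#addBlock grew a trailing addBlockFlags argument, so the reflected signature has to be probed once before the existing call site can keep working on both old and new releases. A condensed, illustrative version of that probe follows; AddBlockCaller is a hypothetical name (the commit's real adapter is the BlockAdder interface), and hadoop-hdfs is assumed to be on the classpath.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import org.apache.hadoop.hdfs.protocol.ClientProtocol;

// Illustrative recap of the addBlock probing above: detect once whether the reflected
// addBlock signature ends with the Hadoop 2.8 flags argument, then always invoke accordingly.
final class AddBlockCaller {

  private final Method addBlockMethod;
  private final boolean hasAddBlockFlags;

  AddBlockCaller() throws NoSuchMethodException {
    Method found = null;
    for (Method method : ClientProtocol.class.getMethods()) {
      if (method.getName().equals("addBlock")) {
        found = method;
        break;
      }
    }
    if (found == null) {
      throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");
    }
    addBlockMethod = found;
    Class<?>[] paramTypes = addBlockMethod.getParameterTypes();
    // Before 2.8 the last parameter is String[] favoredNodes; from 2.8 on it is the flags enum set.
    hasAddBlockFlags = paramTypes[paramTypes.length - 1] != String[].class;
  }

  Object addBlock(ClientProtocol namenode, Object... argsUpToFavoredNodes)
      throws IllegalAccessException, InvocationTargetException {
    if (hasAddBlockFlags) {
      Object[] args = new Object[argsUpToFavoredNodes.length + 1];
      System.arraycopy(argsUpToFavoredNodes, 0, args, 0, argsUpToFavoredNodes.length);
      args[args.length - 1] = null; // no special flags, matching the patch's behaviour
      return addBlockMethod.invoke(namenode, args);
    }
    return addBlockMethod.invoke(namenode, argsUpToFavoredNodes);
  }
}

Passing null for the extra argument, as the patch does, keeps the 2.8 behaviour identical to 2.7 since no allocation flags are requested.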
org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java

@@ -86,7 +86,6 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.Builder;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.DataTransferEncryptorStatus;
-import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.security.SaslPropertiesResolver;
@@ -112,8 +111,7 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
   private static final String NAME_DELIMITER = " ";
 
   @VisibleForTesting
-  static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY =
-      "dfs.encrypt.data.transfer.cipher.suites";
+  static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY = "dfs.encrypt.data.transfer.cipher.suites";
 
   @VisibleForTesting
   static final String AES_CTR_NOPADDING = "AES/CTR/NoPadding";
@@ -185,7 +183,7 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
     try {
       cryptoCodecClass = Class.forName("org.apache.hadoop.crypto.CryptoCodec");
     } catch (ClassNotFoundException e) {
-      LOG.warn("No CryptoCodec class found, should be hadoop 2.5-", e);
+      LOG.debug("No CryptoCodec class found, should be hadoop 2.5-", e);
     }
     if (cryptoCodecClass != null) {
       Method getInstanceMethod = null;
@@ -195,8 +193,12 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
           break;
         }
       }
-      CREATE_CODEC = getInstanceMethod;
       try {
+        if (getInstanceMethod == null) {
+          throw new NoSuchMethodException(
+              "Can not find suitable getInstance method in CryptoCodec");
+        }
+        CREATE_CODEC = getInstanceMethod;
         CREATE_ENCRYPTOR = cryptoCodecClass.getMethod("createEncryptor");
         CREATE_DECRYPTOR = cryptoCodecClass.getMethod("createDecryptor");
@@ -207,11 +209,14 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
         Class<?> decryptorClass = Class.forName("org.apache.hadoop.crypto.Decryptor");
         INIT_DECRYPTOR = decryptorClass.getMethod("init", byte[].class, byte[].class);
         DECRYPT = decryptorClass.getMethod("decrypt", ByteBuffer.class, ByteBuffer.class);
-      } catch (NoSuchMethodException | ClassNotFoundException e) {
-        throw new Error(e);
+      } catch (Exception e) {
+        final String msg = "Couldn't properly initialize access to HDFS internals. Please "
+            + "update your WAL Provider to not make use of the 'asyncfs' provider. See "
+            + "HBASE-16110 for more information.";
+        LOG.error(msg, e);
+        throw new Error(msg, e);
       }
     } else {
-      LOG.warn("Can not initialize CryptoCodec, should be hadoop 2.5-");
       CREATE_CODEC = null;
       CREATE_ENCRYPTOR = null;
       CREATE_DECRYPTOR = null;
@@ -329,62 +334,53 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
}; };
} }
private static SaslAdaptor createSaslAdaptor25() { private static SaslAdaptor createSaslAdaptor25()
try { throws NoSuchFieldException, NoSuchMethodException {
final Field trustedChannelResolverField = DFSClient.class final Field trustedChannelResolverField = DFSClient.class
.getDeclaredField("trustedChannelResolver"); .getDeclaredField("trustedChannelResolver");
trustedChannelResolverField.setAccessible(true); trustedChannelResolverField.setAccessible(true);
final Method getDataEncryptionKeyMethod = DFSClient.class.getMethod("getDataEncryptionKey"); final Method getDataEncryptionKeyMethod = DFSClient.class.getMethod("getDataEncryptionKey");
return new SaslAdaptor() { return new SaslAdaptor() {
@Override @Override
public TrustedChannelResolver getTrustedChannelResolver(DFSClient client) { public TrustedChannelResolver getTrustedChannelResolver(DFSClient client) {
try { try {
return (TrustedChannelResolver) trustedChannelResolverField.get(client); return (TrustedChannelResolver) trustedChannelResolverField.get(client);
} catch (IllegalAccessException e) { } catch (IllegalAccessException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
}
} }
}
@Override @Override
public SaslPropertiesResolver getSaslPropsResolver(DFSClient client) { public SaslPropertiesResolver getSaslPropsResolver(DFSClient client) {
return null; return null;
}
@Override
public AtomicBoolean getFallbackToSimpleAuth(DFSClient client) {
return null;
}
@Override
public DataEncryptionKey createDataEncryptionKey(DFSClient client) {
try {
return (DataEncryptionKey) getDataEncryptionKeyMethod.invoke(client);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
} }
}
@Override };
public AtomicBoolean getFallbackToSimpleAuth(DFSClient client) {
return null;
}
@Override
public DataEncryptionKey createDataEncryptionKey(DFSClient client) {
try {
return (DataEncryptionKey) getDataEncryptionKeyMethod.invoke(client);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
};
} catch (NoSuchFieldException | NoSuchMethodException e) {
throw new Error(e);
}
} }
private static SaslAdaptor createSaslAdaptor() { private static SaslAdaptor createSaslAdaptor()
Class<?> saslDataTransferClientClass = null; throws NoSuchFieldException, NoSuchMethodException {
try { try {
saslDataTransferClientClass = Class return createSaslAdaptor27(
.forName("org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient"); Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient"));
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
LOG.warn("No SaslDataTransferClient class found, should be hadoop 2.5-"); LOG.debug("No SaslDataTransferClient class found, should be hadoop 2.5-", e);
}
try {
return saslDataTransferClientClass != null ? createSaslAdaptor27(saslDataTransferClientClass)
: createSaslAdaptor25();
} catch (NoSuchFieldException | NoSuchMethodException e) {
throw new Error(e);
} }
return createSaslAdaptor25();
} }
private static CipherOptionHelper createCipherHelper25() { private static CipherOptionHelper createCipherHelper25() {
@@ -451,9 +447,16 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
     final Method getOutKeyMethod = cipherOptionClass.getMethod("getOutKey");
     final Method getOutIvMethod = cipherOptionClass.getMethod("getOutIv");
-    final Method convertCipherOptionsMethod = PBHelper.class.getMethod("convertCipherOptions",
+    Class<?> pbHelperClass;
+    try {
+      pbHelperClass = Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelperClient");
+    } catch (ClassNotFoundException e) {
+      LOG.debug("No PBHelperClient class found, should be hadoop 2.7-", e);
+      pbHelperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;
+    }
+    final Method convertCipherOptionsMethod = pbHelperClass.getMethod("convertCipherOptions",
       List.class);
-    final Method convertCipherOptionProtosMethod = PBHelper.class
+    final Method convertCipherOptionProtosMethod = pbHelperClass
         .getMethod("convertCipherOptionProtos", List.class);
     final Method addAllCipherOptionMethod = DataTransferEncryptorMessageProto.Builder.class
         .getMethod("addAllCipherOption", Iterable.class);
@@ -577,19 +580,16 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
     };
   }
 
-  private static CipherOptionHelper createCipherHelper() {
+  private static CipherOptionHelper createCipherHelper()
+      throws ClassNotFoundException, NoSuchMethodException {
     Class<?> cipherOptionClass;
     try {
       cipherOptionClass = Class.forName("org.apache.hadoop.crypto.CipherOption");
     } catch (ClassNotFoundException e) {
-      LOG.warn("No CipherOption class found, should be hadoop 2.5-");
+      LOG.debug("No CipherOption class found, should be hadoop 2.5-", e);
       return createCipherHelper25();
     }
-    try {
-      return createCipherHelper27(cipherOptionClass);
-    } catch (NoSuchMethodException | ClassNotFoundException e) {
-      throw new Error(e);
-    }
+    return createCipherHelper27(cipherOptionClass);
   }
 
   private static TransparentCryptoHelper createTransparentCryptoHelper25() {
@@ -646,25 +646,30 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
     };
   }
 
-  private static TransparentCryptoHelper createTransparentCryptoHelper() {
+  private static TransparentCryptoHelper createTransparentCryptoHelper()
+      throws NoSuchMethodException, ClassNotFoundException {
     Class<?> feInfoClass;
     try {
       feInfoClass = Class.forName("org.apache.hadoop.fs.FileEncryptionInfo");
     } catch (ClassNotFoundException e) {
-      LOG.warn("No FileEncryptionInfo class found, should be hadoop 2.5-");
+      LOG.debug("No FileEncryptionInfo class found, should be hadoop 2.5-", e);
       return createTransparentCryptoHelper25();
     }
-    try {
-      return createTransparentCryptoHelper27(feInfoClass);
-    } catch (NoSuchMethodException | ClassNotFoundException e) {
-      throw new Error(e);
-    }
+    return createTransparentCryptoHelper27(feInfoClass);
   }
 
   static {
-    SASL_ADAPTOR = createSaslAdaptor();
-    CIPHER_OPTION_HELPER = createCipherHelper();
-    TRANSPARENT_CRYPTO_HELPER = createTransparentCryptoHelper();
+    try {
+      SASL_ADAPTOR = createSaslAdaptor();
+      CIPHER_OPTION_HELPER = createCipherHelper();
+      TRANSPARENT_CRYPTO_HELPER = createTransparentCryptoHelper();
+    } catch (Exception e) {
+      final String msg = "Couldn't properly initialize access to HDFS internals. Please "
+          + "update your WAL Provider to not make use of the 'asyncfs' provider. See "
+          + "HBASE-16110 for more information.";
+      LOG.error(msg, e);
+      throw new Error(msg, e);
+    }
   }
 
   /**
@@ -828,40 +833,40 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
byte[] challenge = proto.getPayload().toByteArray(); byte[] challenge = proto.getPayload().toByteArray();
byte[] response = saslClient.evaluateChallenge(challenge); byte[] response = saslClient.evaluateChallenge(challenge);
switch (step) { switch (step) {
case 1: { case 1: {
List<Object> cipherOptions = null; List<Object> cipherOptions = null;
if (requestedQopContainsPrivacy()) { if (requestedQopContainsPrivacy()) {
cipherOptions = CIPHER_OPTION_HELPER.getCipherOptions(conf); cipherOptions = CIPHER_OPTION_HELPER.getCipherOptions(conf);
}
sendSaslMessage(ctx, response, cipherOptions);
ctx.flush();
step++;
break;
} }
case 2: { sendSaslMessage(ctx, response, cipherOptions);
assert response == null; ctx.flush();
checkSaslComplete(); step++;
Object cipherOption = break;
CIPHER_OPTION_HELPER.getCipherOption(proto, isNegotiatedQopPrivacy(), saslClient); }
ChannelPipeline p = ctx.pipeline(); case 2: {
while (p.first() != null) { assert response == null;
p.removeFirst(); checkSaslComplete();
} Object cipherOption = CIPHER_OPTION_HELPER.getCipherOption(proto,
if (cipherOption != null) { isNegotiatedQopPrivacy(), saslClient);
CryptoCodec codec = new CryptoCodec(conf, cipherOption); ChannelPipeline p = ctx.pipeline();
p.addLast(new EncryptHandler(codec), new DecryptHandler(codec)); while (p.first() != null) {
} else { p.removeFirst();
if (useWrap()) {
p.addLast(new SaslWrapHandler(saslClient),
new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4),
new SaslUnwrapHandler(saslClient));
}
}
promise.trySuccess(null);
break;
} }
default: if (cipherOption != null) {
throw new IllegalArgumentException("Unrecognized negotiation step: " + step); CryptoCodec codec = new CryptoCodec(conf, cipherOption);
p.addLast(new EncryptHandler(codec), new DecryptHandler(codec));
} else {
if (useWrap()) {
p.addLast(new SaslWrapHandler(saslClient),
new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4),
new SaslUnwrapHandler(saslClient));
}
}
promise.trySuccess(null);
break;
}
default:
throw new IllegalArgumentException("Unrecognized negotiation step: " + step);
} }
} else { } else {
ctx.fireChannelRead(msg); ctx.fireChannelRead(msg);