HDFS-14509. DN throws InvalidToken due to inequality of password when upgrade NN 2.x to 3.x. Contributed by Yuxuan Wang and Konstantin Shvachko.

(cherry picked from commit 72ae371e7a)
This commit is contained in:
Chen Liang 2019-10-08 11:56:52 -07:00
parent b74754a448
commit 01d9952f63
2 changed files with 60 additions and 0 deletions

View File

@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AccessModeProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenSecretProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -142,6 +143,7 @@ public class BlockTokenIdentifier extends TokenIdentifier {
}
/**
 * Replaces the handshake message carried by this token identifier.
 * Also drops the cached serialized form so a subsequent getBytes()
 * re-serializes with the new message instead of returning stale bytes.
 */
public void setHandshakeMsg(byte[] bytes) {
  handshakeMsg = bytes;
  cache = null; // stale cache would not reflect the new message
}
@@ -214,6 +216,15 @@ public class BlockTokenIdentifier extends TokenIdentifier {
if (!dis.markSupported()) {
throw new IOException("Could not peek first byte.");
}
// this.cache should be assigned the raw bytes of the input data for
// upgrade compatibility. If no fields are mutated before getBytes() is
// called (e.g. to retrieve the password), we should return the raw bytes
// instead of re-serializing this instance's own fields, because we might
// otherwise lose newly added fields that we cannot recognize.
this.cache = IOUtils.readFullyToByteArray(dis);
dis.reset();
dis.mark(1);
final byte firstByte = dis.readByte();
dis.reset();

View File

@@ -32,6 +32,7 @@ import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.DataOutput;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Calendar;
@@ -41,6 +42,7 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mockito.Mockito;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -837,4 +839,51 @@ public class TestBlockToken {
}
}
}
@Test
public void testRetrievePasswordWithUnknownFields() throws IOException {
  // Simulate a newer writer appending a field that this version's
  // readFields() does not understand but that is still covered by the
  // password computation.
  BlockTokenIdentifier identifier = Mockito.spy(new BlockTokenIdentifier());
  Mockito.doAnswer(invocation -> {
    invocation.callRealMethod();
    // Trailing byte: ignored by BlockTokenIdentifier#readFields(),
    // yet part of the bytes the password is derived from.
    ((DataOutput) invocation.getArguments()[0]).write(7);
    return null;
  }).when(identifier).write(Mockito.any());

  BlockTokenSecretManager secretManager =
      new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime,
          0, 1, "fake-pool", null, false);
  // Master side: derive the password over the full serialized stream.
  byte[] expected = secretManager.createPassword(identifier);

  // Slave side: deserialize, then recompute the password; the unknown
  // trailing byte must still be accounted for via the cached raw bytes.
  BlockTokenIdentifier deserialized = new BlockTokenIdentifier();
  deserialized.readFields(new DataInputStream(
      new ByteArrayInputStream(identifier.getBytes())));
  assertArrayEquals(expected, secretManager.retrievePassword(deserialized));
}
@Test
public void testRetrievePasswordWithRecognizableFieldsOnly()
    throws IOException {
  BlockTokenSecretManager secretManager =
      new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime,
          0, 1, "fake-pool", null, false);
  // Master side: create the password (this populates the bytes cache).
  BlockTokenIdentifier masterIdentifier = new BlockTokenIdentifier();
  byte[] expected = secretManager.createPassword(masterIdentifier);
  // Touch a field to drop the cache, so that getBytes() serializes only
  // the fields this version recognizes.
  masterIdentifier.setExpiryDate(masterIdentifier.getExpiryDate());
  // Slave side: round-trip through readFields() and check the password
  // still matches.
  BlockTokenIdentifier slaveIdentifier = new BlockTokenIdentifier();
  slaveIdentifier.readFields(new DataInputStream(
      new ByteArrayInputStream(masterIdentifier.getBytes())));
  assertArrayEquals(expected, secretManager.retrievePassword(slaveIdentifier));
}
}