HDFS-12872. EC Checksum broken when BlockAccessToken is enabled.

Xiao Chen 2017-12-05 20:48:02 -08:00
parent 05c347fe51
commit 56b1ff80dd
3 changed files with 14 additions and 7 deletions

LocatedBlocks.java

@@ -192,6 +192,7 @@ public class LocatedBlocks {
         + "\n underConstruction=" + underConstruction
         + "\n blocks=" + blocks
         + "\n lastLocatedBlock=" + lastLocatedBlock
-        + "\n isLastBlockComplete=" + isLastBlockComplete + "}";
+        + "\n isLastBlockComplete=" + isLastBlockComplete
+        + "\n ecPolicy=" + ecPolicy + "}";
   }
 }
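
Read as unified output, the only change here is that LocatedBlocks#toString now prints the erasure coding policy before the closing brace. A sketch of the resulting method, assembled from the hunk above; the opening lines before underConstruction (class-name prefix, fileLength) are assumptions, not verbatim source:

  @Override
  public String toString() {
    return getClass().getSimpleName() + "{"            // assumed opening line
        + "\n fileLength=" + fileLength                // assumed field
        + "\n underConstruction=" + underConstruction
        + "\n blocks=" + blocks
        + "\n lastLocatedBlock=" + lastLocatedBlock
        + "\n isLastBlockComplete=" + isLastBlockComplete
        + "\n ecPolicy=" + ecPolicy + "}";             // new in this patch
  }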

BlockManager.java

@@ -1378,12 +1378,11 @@ public class BlockManager implements BlockStatsMXBean {
             b.getStorageIDs());
       }
       sb.setBlockTokens(blockTokens);
-    } else {
-      b.setBlockToken(blockTokenSecretManager.generateToken(
-          NameNode.getRemoteUser().getShortUserName(),
-          b.getBlock(), EnumSet.of(mode), b.getStorageTypes(),
-          b.getStorageIDs()));
     }
+    b.setBlockToken(blockTokenSecretManager.generateToken(
+        NameNode.getRemoteUser().getShortUserName(),
+        b.getBlock(), EnumSet.of(mode), b.getStorageTypes(),
+        b.getStorageIDs()));
   }
 }
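
This hunk is the substance of the fix. Before the patch, the striped branch only filled in per-internal-block tokens via sb.setBlockTokens, while the group-level b.setBlockToken call sat in the else branch and so never ran for striped blocks; EC checksum requests therefore carried no valid token for the block group when block access tokens were enabled. A sketch of the patched control flow, with the surrounding method and the per-internal-block loop elided (names outside the hunk are assumptions):

    if (b instanceof LocatedStripedBlock) {        // assumed guard
      LocatedStripedBlock sb = (LocatedStripedBlock) b;
      // ... one token per internal block, unchanged by this patch ...
      sb.setBlockTokens(blockTokens);
    }
    // Now unconditional: also generate a token for the block (group) itself,
    // so checksum and other token-checked operations on EC files succeed.
    b.setBlockToken(blockTokenSecretManager.generateToken(
        NameNode.getRemoteUser().getShortUserName(),
        b.getBlock(), EnumSet.of(mode), b.getStorageTypes(),
        b.getStorageIDs()));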

TestFileChecksum.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -33,8 +34,12 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
+import java.util.Random;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
 
 /**
  * This test serves a prototype to demo the idea proposed so far. It creates two
@@ -77,6 +82,7 @@ public class TestFileChecksum {
     conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY,
         false);
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, 0);
+    conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
     Path ecPath = new Path(ecDir);
     cluster.getFileSystem().mkdir(ecPath, FsPermission.getDirDefault());
@@ -89,6 +95,7 @@
     bytesPerCRC = conf.getInt(
         HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY,
         HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT);
+    GenericTestUtils.setLogLevel(FileChecksumHelper.LOG, Level.DEBUG);
   }
 
   @After
@@ -518,7 +525,7 @@
     LocatedBlock locatedBlock = locatedBlocks.get(0);
     DatanodeInfo[] datanodes = locatedBlock.getLocations();
-    DatanodeInfo chosenDn = datanodes[0];
+    DatanodeInfo chosenDn = datanodes[new Random().nextInt(datanodes.length)];
 
     int idx = 0;
     for (DataNode dn : cluster.getDataNodes()) {
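
The last hunk stops pinning the test to the first DataNode; choosing a random location makes the token-enabled checksum path exercise an arbitrary node. The hunk is cut off before the loop body, so the following is only an illustrative helper for what such a loop typically does (not part of the patch), reusing types already imported by the test:

  // Hypothetical helper: map the chosen DatanodeInfo back to its index
  // in the MiniDFSCluster's DataNode list; returns -1 if absent.
  private static int indexOf(MiniDFSCluster cluster, DatanodeInfo chosenDn) {
    int idx = 0;
    for (DataNode dn : cluster.getDataNodes()) {
      if (dn.getDatanodeId().equals(chosenDn)) {
        return idx;
      }
      idx++;
    }
    return -1;
  }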