HDFS-14303. The block directory check logic is incorrect when only the meta file exists, printing a meaningless warning log. Contributed by qiang Liu.

Wei-Chiu Chuang 2019-06-19 10:27:53 -07:00
parent f5ecc0bc08
commit 71ecd2e411
2 changed files with 69 additions and 1 deletion

FsVolumeImpl.java

@@ -1374,7 +1374,7 @@ public class FsVolumeImpl implements FsVolumeSpi {
       if (!Block.isBlockFilename(file)) {
         if (isBlockMetaFile(Block.BLOCK_FILE_PREFIX, file.getName())) {
           long blockId = Block.getBlockId(file.getName());
-          verifyFileLocation(file.getParentFile(), bpFinalizedDir,
+          verifyFileLocation(file, bpFinalizedDir,
               blockId);
           report.add(new ScanInfo(blockId, null, file, this));
         }

TestDirectoryScanner.java

@@ -26,6 +26,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -72,6 +73,9 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.AutoCloseableLock;
 import org.apache.hadoop.util.Time;
+import org.apache.log4j.Level;
+import org.apache.log4j.SimpleLayout;
+import org.apache.log4j.WriterAppender;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -394,6 +398,70 @@ public class TestDirectoryScanner {
     }
   }
+
+  /**
+   * Test that scanning a block directory that contains only a meta file does
+   * not generate the wrong-folder-structure warning log.
+   */
+  @Test(timeout=600000)
+  public void testScanDirectoryStructureWarn() throws Exception {
+
+    // Attach an appender so we can inspect what has been written to the log.
+    ByteArrayOutputStream loggerStream = new ByteArrayOutputStream();
+    org.apache.log4j.Logger rootLogger =
+        org.apache.log4j.Logger.getRootLogger();
+    rootLogger.setLevel(Level.INFO);
+    WriterAppender writerAppender =
+        new WriterAppender(new SimpleLayout(), loggerStream);
+    rootLogger.addAppender(writerAppender);
+
+    cluster = new MiniDFSCluster
+        .Builder(CONF)
+        .storageTypes(new StorageType[] {
+            StorageType.RAM_DISK, StorageType.DEFAULT })
+        .numDataNodes(1)
+        .build();
+    try {
+      cluster.waitActive();
+      bpid = cluster.getNamesystem().getBlockPoolId();
+      fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
+      client = cluster.getFileSystem().getClient();
+      scanner = new DirectoryScanner(fds, CONF);
+      scanner.setRetainDiffs(true);
+      FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
+
+      // Create a file on RAM_DISK.
+      createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH, true);
+
+      // Ensure there is no difference between volumeMap and disk.
+      scan(1, 0, 0, 0, 0, 0);
+
+      // Delete the block file, leaving only the meta file behind.
+      deleteBlockFile();
+
+      // Scan again and make sure the directory-structure warning is not
+      // printed.
+      scan(1, 1, 0, 1, 0, 0, 0);
+
+      // The invalid-directory warning must not appear, while the
+      // missing-block warning must appear.
+      String logContent = new String(loggerStream.toByteArray());
+      String missingBlockWarn = "Deleted a metadata file" +
+          " for the deleted block";
+      String dirStructureWarnLog = " found in invalid directory." +
+          " Expected directory: ";
+      assertFalse("directory check print meaningless warning message",
+          logContent.contains(dirStructureWarnLog));
+      assertTrue("missing block warn log not appear",
+          logContent.contains(missingBlockWarn));
+      LOG.info("check pass");
+    } finally {
+      if (scanner != null) {
+        scanner.shutdown();
+        scanner = null;
+      }
+      cluster.shutdown();
+      cluster = null;
+    }
+  }
+
   @Test(timeout = 300000)
   public void testDeleteBlockOnTransientStorage() throws Exception {
     cluster = new MiniDFSCluster.Builder(CONF)