HDFS-13211. Fix a bug in DirectoryDiffList.getMinListForRange. Contributed by Shashikant Banerjee

Tsz-Wo Nicholas Sze 2018-03-01 14:12:15 -08:00
parent 22928c0c60
commit 96e8f260ab
3 changed files with 161 additions and 86 deletions

File: DirectoryDiffList.java

@@ -97,7 +97,13 @@ public class DirectoryDiffList implements DiffList<DirectoryDiff> {
     public void setDiff(ChildrenDiff diff) {
       this.diff = diff;
     }
+
+    @Override
+    public String toString() {
+      return "->" + skipTo;
+    }
   }
 
   /**
    * SkipListNode is an implementation of a DirectoryDiff List node,
    * which stores a Directory Diff and references to subsequent nodes.
@@ -191,6 +197,11 @@ public class DirectoryDiffList implements DiffList<DirectoryDiff> {
         return skipDiffList.get(level).getSkipTo();
       }
     }
+
+    @Override
+    public String toString() {
+      return diff != null ? "" + diff.getSnapshotId() : "?";
+    }
   }
 
   /**
@@ -225,13 +236,6 @@ public class DirectoryDiffList implements DiffList<DirectoryDiff> {
     this.skipInterval = interval;
   }
 
-  public static DirectoryDiffList createSkipList(int capacity, int interval,
-      int skipLevel) {
-    DirectoryDiffList list =
-        new DirectoryDiffList(capacity, interval, skipLevel);
-    return list;
-  }
-
   /**
    * Adds the specified data element to the beginning of the SkipList,
    * if the element is not already present.
@@ -444,15 +448,15 @@ public class DirectoryDiffList implements DiffList<DirectoryDiff> {
    * order to generate all the changes occurred between fromIndex and
    * toIndex.
    *
-   * @param fromIndex index from where the summation has to start
-   * @param toIndex index till where the summation has to end
+   * @param fromIndex index from where the summation has to start(inclusive)
+   * @param toIndex index till where the summation has to end(exclusive)
    * @return list of Directory Diff
    */
   @Override
   public List<DirectoryDiff> getMinListForRange(int fromIndex, int toIndex,
       INodeDirectory dir) {
     final List<DirectoryDiff> subList = new ArrayList<>();
-    final int toSnapshotId = get(toIndex).getSnapshotId();
+    final int toSnapshotId = get(toIndex - 1).getSnapshotId();
     for (SkipListNode current = getNode(fromIndex); current != null;) {
       SkipListNode next = null;
       ChildrenDiff childrenDiff = null;
@@ -467,8 +471,21 @@ public class DirectoryDiffList implements DiffList<DirectoryDiff> {
       subList.add(childrenDiff == null ? curDiff :
           new DirectoryDiff(curDiff.getSnapshotId(), dir, childrenDiff));
+      if (current.getDiff().compareTo(toSnapshotId) == 0) {
+        break;
+      }
       current = next;
     }
     return subList;
   }
+
+  @Override
+  public String toString() {
+    final StringBuilder b = new StringBuilder(getClass().getSimpleName());
+    b.append("\nhead: ").append(head).append(head.skipDiffList);
+    for (SkipListNode n : skipNodeList) {
+      b.append("\n ").append(n).append(n.skipDiffList);
+    }
+    return b.toString();
+  }
 }
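
The substance of the fix is in the last two hunks above: getMinListForRange now treats fromIndex as inclusive and toIndex as exclusive, reads the target snapshot id from get(toIndex - 1) instead of get(toIndex), and stops walking the list once the diff for that snapshot has been added. The following is a minimal standalone sketch of that half-open contract in plain Java; the integer list stands in for snapshot ids, and all names are illustrative only, not the HDFS implementation.

import java.util.ArrayList;
import java.util.List;

public class MinListRangeSketch {
  static List<Integer> minListForRange(List<Integer> snapshotIds,
      int fromIndex, int toIndex) {
    final List<Integer> subList = new ArrayList<>();
    // Analogue of "get(toIndex - 1).getSnapshotId()" after the fix;
    // "get(toIndex)" would read one element past the requested range.
    final int toSnapshotId = snapshotIds.get(toIndex - 1);
    for (int i = fromIndex; i < snapshotIds.size(); i++) {
      final int id = snapshotIds.get(i);
      subList.add(id);
      if (id == toSnapshotId) {
        break; // analogue of the added early exit once toSnapshotId is reached
      }
    }
    return subList;
  }

  public static void main(String[] args) {
    final List<Integer> ids = List.of(1, 2, 3, 4, 5);
    // [fromIndex, toIndex) = [1, 4) covers snapshot ids 2, 3, 4 only.
    System.out.println(minListForRange(ids, 1, 4)); // prints [2, 3, 4]
  }
}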

File: ReadOnlyList.java

@@ -106,6 +106,11 @@ public interface ReadOnlyList<E> extends Iterable<E> {
       public E get(int i) {
         return list.get(i);
       }
+
+      @Override
+      public String toString() {
+        return list.toString();
+      }
     };
   }
 
@@ -234,6 +239,19 @@ public interface ReadOnlyList<E> extends Iterable<E> {
       public <T> T[] toArray(T[] a) {
         throw new UnsupportedOperationException();
       }
+
+      @Override
+      public String toString() {
+        if (list.isEmpty()) {
+          return "[]";
+        }
+        final Iterator<E> i = list.iterator();
+        final StringBuilder b = new StringBuilder("[").append(i.next());
+        for(; i.hasNext();) {
+          b.append(", ").append(i.next());
+        }
+        return b + "]";
+      }
     };
   }
 }
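
Both toString additions above are diagnostic only: they let the read-only views print their elements (for example from the new DirectoryDiffList#toString) instead of the default Object identity string. A hedged illustration of the expected effect, assuming the view comes from the ReadOnlyList.Util.asReadOnlyList helper that the old test code already uses:

import java.util.Arrays;
import org.apache.hadoop.hdfs.util.ReadOnlyList;

public class ReadOnlyListToStringDemo {
  public static void main(String[] args) {
    // With the toString added above, the read-only view delegates to the
    // backing list, so this is expected to print "[a, b]" rather than an
    // object hash. (Sketch only; depends on the patched ReadOnlyList.)
    ReadOnlyList<String> view =
        ReadOnlyList.Util.asReadOnlyList(Arrays.asList("a", "b"));
    System.out.println(view);
  }
}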

File: TestDirectoryDiffList.java

@@ -19,14 +19,13 @@ package org.apache.hadoop.hdfs.server.namenode.snapshot;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.junit.After;
@@ -34,8 +33,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Collections;
 import java.util.List;
-import java.util.Iterator;
 
 /**
  * This class tests the DirectoryDiffList API's.
@@ -45,11 +44,6 @@ public class TestDirectoryDiffList{
     SnapshotTestHelper.disableLogs();
   }
 
-  private static final long SEED = 0;
-  private static final short REPL = 3;
-  private static final short REPL_2 = 1;
-  private static final long BLOCKSIZE = 1024;
-
   private static final Configuration CONF = new Configuration();
   private static MiniDFSCluster cluster;
   private static FSNamesystem fsn;
@@ -58,9 +52,8 @@ public class TestDirectoryDiffList{
   @Before
   public void setUp() throws Exception {
-    CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCKSIZE);
-    cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(REPL).format(true)
-        .build();
+    cluster =
+        new MiniDFSCluster.Builder(CONF).numDataNodes(0).format(true).build();
     cluster.waitActive();
     fsn = cluster.getNamesystem();
     fsdir = fsn.getFSDirectory();
@@ -75,88 +68,135 @@ public class TestDirectoryDiffList{
     }
   }
 
-  private void compareChildrenList(ReadOnlyList<INode> list1,
-      ReadOnlyList<INode> list2) {
-    Assert.assertEquals(list1.size(), list2.size());
-    for (int index = 0; index < list1.size(); index++) {
-      Assert.assertEquals(list1.get(index), list2.get(index));
+  static void assertList(List<INode> expected, List<INode> computed) {
+    Assert.assertEquals(expected.size(), computed.size());
+    for (int index = 0; index < expected.size(); index++) {
+      Assert.assertEquals(expected.get(index), computed.get(index));
     }
   }
 
-  private void verifyChildrenListForAllSnapshots(DirectoryDiffList list,
-      INodeDirectory dir) {
-    for (int index = 0;
-        index < list.size(); index++) {
-      ReadOnlyList<INode> list1 = dir.getChildrenList(index);
-      List<DirectoryWithSnapshotFeature.DirectoryDiff> subList =
-          list.getMinListForRange(index, list.size() - 1, dir);
-      ReadOnlyList<INode> list2 = getChildrenList(dir, subList);
-      compareChildrenList(list1, list2);
+  static void verifyChildrenList(DirectoryDiffList skip, INodeDirectory dir) {
+    final int n = skip.size();
+    for (int i = 0; i < skip.size(); i++) {
+      final List<INode> expected =
+          ReadOnlyList.Util.asList(dir.getChildrenList(i));
+      final List<INode> computed = getChildrenList(skip, i, n, dir);
+      try {
+        assertList(expected, computed);
+      } catch (AssertionError ae) {
+        throw new AssertionError(
+            "i = " + i + "\ncomputed = " + computed + "\nexpected = "
+                + expected, ae);
+      }
     }
   }
 
-  private ReadOnlyList<INode> getChildrenList(INodeDirectory currentINode,
-      List<DirectoryWithSnapshotFeature.DirectoryDiff> list) {
-    List<INode> children = null;
-    final DirectoryWithSnapshotFeature.ChildrenDiff
-        combined = new DirectoryWithSnapshotFeature.ChildrenDiff();
-    for (DirectoryWithSnapshotFeature.DirectoryDiff d : list) {
+  static void verifyChildrenList(
+      DiffList<DirectoryDiff> array, DirectoryDiffList skip,
+      INodeDirectory dir, List<INode> childrenList) {
+    final int n = array.size();
+    Assert.assertEquals(n, skip.size());
+    for (int i = 0; i < n - 1; i++) {
+      for (int j = i + 1; j < n - 1; j++) {
+        final List<INode> expected = getCombined(array, i, j, dir)
+            .apply2Previous(childrenList);
+        final List<INode> computed = getCombined(skip, i, j, dir)
+            .apply2Previous(childrenList);
+        try {
+          assertList(expected, computed);
+        } catch (AssertionError ae) {
+          throw new AssertionError(
+              "i = " + i + ", j = " + j + "\ncomputed = " + computed
+                  + "\nexpected = " + expected + "\n" + skip, ae);
+        }
+      }
+    }
+  }
+
+  private static ChildrenDiff getCombined(
+      DiffList<DirectoryDiff> list, int from, int to, INodeDirectory dir) {
+    final List<DirectoryDiff> minList = list.getMinListForRange(from, to, dir);
+    final ChildrenDiff combined = new ChildrenDiff();
+    for (DirectoryDiff d : minList) {
       combined.combinePosterior(d.getChildrenDiff(), null);
     }
-    children = combined.apply2Current(ReadOnlyList.Util.asList(
-        currentINode.getChildrenList(Snapshot.CURRENT_STATE_ID)));
-    return ReadOnlyList.Util.asReadOnlyList(children);
+    return combined;
+  }
+
+  static List<INode> getChildrenList(
+      DiffList<DirectoryDiff> list, int from, int to, INodeDirectory dir) {
+    final ChildrenDiff combined = getCombined(list, from, to, dir);
+    return combined.apply2Current(ReadOnlyList.Util.asList(dir.getChildrenList(
+        Snapshot.CURRENT_STATE_ID)));
+  }
+
+  static Path getChildPath(Path parent, int i) {
+    return new Path(parent, "c" + i);
   }
 
   @Test
-  public void testDirectoryDiffList() throws Exception {
-    final Path sdir1 = new Path("/dir1");
-    hdfs.mkdirs(sdir1);
-    SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s0");
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + i);
-      DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPL_2, SEED);
-      SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s" + i);
-    }
-    INodeDirectory sdir1Node = fsdir.getINode(sdir1.toString()).asDirectory();
-    DiffList<DirectoryDiff> diffs =
-        sdir1Node.getDiffs().asList();
-    Iterator<DirectoryDiff> itr = diffs.iterator();
-    DirectoryDiffList skipList = DirectoryDiffList.createSkipList(0, 3, 5);
-    while (itr.hasNext()) {
-      skipList.addLast(itr.next());
-    }
-    // verify that the both the children list obtained from hdfs and
-    // DirectoryDiffList are same
-    verifyChildrenListForAllSnapshots(skipList, sdir1Node);
+  public void testAddLast() throws Exception {
+    testAddLast(7);
   }
 
-  @Test
-  public void testDirectoryDiffList2() throws Exception {
-    final Path sdir1 = new Path("/dir1");
-    hdfs.mkdirs(sdir1);
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + i);
-      DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPL_2, SEED);
-    }
-    SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s0");
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + (31 - i));
-      hdfs.delete(file, false);
-      SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s" + i);
-    }
-    INodeDirectory sdir1Node = fsdir.getINode(sdir1.toString()).asDirectory();
-    DiffList<DirectoryDiff> diffs = sdir1Node.getDiffs().asList();
-    int index = diffs.size() - 1;
-    DirectoryDiffList skipList = DirectoryDiffList.createSkipList(0, 3, 5);
-    while (index >= 0) {
-      skipList.addFirst(diffs.get(index));
-      index--;
-    }
-    // verify that the both the children list obtained from hdfs and
-    // DirectoryDiffList are same
-    verifyChildrenListForAllSnapshots(skipList, sdir1Node);
+  static void testAddLast(int n) throws Exception {
+    final Path root = new Path("/testAddLast" + n);
+    hdfs.mkdirs(root);
+    SnapshotTestHelper.createSnapshot(hdfs, root, "s0");
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, i);
+      hdfs.mkdirs(child);
+      SnapshotTestHelper.createSnapshot(hdfs, root, "s" + i);
+    }
+    INodeDirectory dir = fsdir.getINode(root.toString()).asDirectory();
+    DiffList<DirectoryDiff> diffs = dir.getDiffs().asList();
+    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
+    for (DirectoryDiff d : diffs) {
+      skipList.addLast(d);
+      arrayList.addLast(d);
+    }
+    // verify that the both the children list obtained from hdfs and
+    // DirectoryDiffList are same
+    verifyChildrenList(skipList, dir);
+    verifyChildrenList(arrayList, skipList, dir, Collections.emptyList());
+  }
+
+  @Test
+  public void testAddFirst() throws Exception {
+    testAddFirst(7);
+  }
+
+  static void testAddFirst(int n) throws Exception {
+    final Path root = new Path("/testAddFirst" + n);
+    hdfs.mkdirs(root);
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, i);
+      hdfs.mkdirs(child);
+    }
+    INodeDirectory dir = fsdir.getINode(root.toString()).asDirectory();
+    SnapshotTestHelper.createSnapshot(hdfs, root, "s0");
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, n - i);
+      hdfs.delete(child, false);
+      SnapshotTestHelper.createSnapshot(hdfs, root, "s" + i);
    }
+    DiffList<DirectoryDiff> diffs = dir.getDiffs().asList();
+    List<INode> childrenList = ReadOnlyList.Util.asList(dir.getChildrenList(
+        diffs.get(0).getSnapshotId()));
+    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
+    for (int i = diffs.size() - 1; i >= 0; i--) {
+      final DirectoryDiff d = diffs.get(i);
+      skipList.addFirst(d);
+      arrayList.addFirst(d);
+    }
+    // verify that the both the children list obtained from hdfs and
+    // DirectoryDiffList are same
+    verifyChildrenList(skipList, dir);
+    verifyChildrenList(arrayList, skipList, dir, childrenList);
   }
 }
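
The rewritten test no longer compares the skip list only against the live namespace; it also cross-checks DirectoryDiffList against a plain DiffListByArrayList reference for every snapshot range. The sketch below restates that cross-checking pattern generically; RangeList and its method names are hypothetical stand-ins, not HDFS APIs.

import java.util.List;

// Generic restatement of the cross-check used by the new tests: a reference
// implementation and an optimized one must agree on every half-open range.
interface RangeList<T> {
  int size();
  List<T> minListForRange(int fromInclusive, int toExclusive);
}

class CrossCheck {
  static <T> void assertSameRanges(RangeList<T> reference,
      RangeList<T> optimized) {
    final int n = reference.size();
    if (n != optimized.size()) {
      throw new AssertionError("size mismatch: " + n + " vs " + optimized.size());
    }
    for (int i = 0; i < n; i++) {
      for (int j = i + 1; j <= n; j++) {
        final List<T> expected = reference.minListForRange(i, j);
        final List<T> computed = optimized.minListForRange(i, j);
        if (!expected.equals(computed)) {
          throw new AssertionError("range [" + i + ", " + j + "): expected "
              + expected + " but got " + computed);
        }
      }
    }
  }
}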