HDFS-7950. Fix TestFsDatasetImpl#testAddVolumes failure on Windows. (Contributed by Xiaoyu Yao)

Arpit Agarwal 2015-03-18 12:33:59 -07:00
parent 30da99cbaf
commit d462c62755
2 changed files with 10 additions and 4 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -1178,6 +1178,9 @@ Release 2.7.0 - UNRELEASED
     HDFS-7948. TestDataNodeHotSwapVolumes#testAddVolumeFailures failed on
     Windows. (Xiaoyu Yao via Arpit Agarwal)
 
+    HDFS-7950. Fix TestFsDatasetImpl#testAddVolumes failure on Windows.
+    (Xiaoyu Yao via Arpit Agarwal)
+
   BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
       HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java

@@ -21,6 +21,7 @@ import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystemTestHelper;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -111,7 +112,7 @@ public class TestFsDatasetImpl {
     List<String> dirStrings = new ArrayList<String>();
     for (int i = 0; i < numDirs; i++) {
       File loc = new File(BASE_DIR + "/data" + i);
-      dirStrings.add(loc.toString());
+      dirStrings.add(new Path(loc.toString()).toUri().toString());
       loc.mkdirs();
       dirs.add(createStorageDirectory(loc));
       when(storage.getStorageDir(i)).thenReturn(dirs.get(i));
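
The key change in this hunk is that the test now feeds Path-normalized URI strings into the configuration instead of raw File.toString() values. The sketch below is a standalone illustration, not part of the patch: the class name PathUriDemo and the sample Windows path are made up, and the URI form shown in the comments is approximate. It shows how org.apache.hadoop.fs.Path turns a backslash-separated Windows path into a forward-slash URI string that URI-based parsing such as StorageLocation.parse can handle.

// Standalone illustration (assumed class name); requires hadoop-common on the classpath.
import java.io.File;
import org.apache.hadoop.fs.Path;

public class PathUriDemo {
  public static void main(String[] args) {
    // A Windows-style directory, as the test's BASE_DIR + "/data0" might resolve to.
    File loc = new File("C:\\hadoop\\test\\data0");
    String raw = loc.toString();                    // backslashes: C:\hadoop\test\data0
    String uri = new Path(raw).toUri().toString();  // forward slashes, roughly /C:/hadoop/test/data0
    System.out.println(raw);
    System.out.println(uri);
  }
}

On a Unix layout the two forms typically coincide, which is consistent with the failure surfacing only on Windows.
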
@@ -158,8 +159,9 @@ public class TestFsDatasetImpl {
     }
     for (int i = 0; i < numNewVolumes; i++) {
       String path = BASE_DIR + "/newData" + i;
-      expectedVolumes.add(path);
-      StorageLocation loc = StorageLocation.parse(path);
+      String pathUri = new Path(path).toUri().toString();
+      expectedVolumes.add(new File(pathUri).toString());
+      StorageLocation loc = StorageLocation.parse(pathUri);
       Storage.StorageDirectory sd = createStorageDirectory(new File(path));
       DataStorage.VolumeBuilder builder =
           new DataStorage.VolumeBuilder(storage, sd);
@@ -178,7 +180,8 @@ public class TestFsDatasetImpl {
       actualVolumes.add(
           dataset.getVolumes().get(numExistingVolumes + i).getBasePath());
     }
-    assertEquals(actualVolumes, expectedVolumes);
+    assertEquals(actualVolumes.size(), expectedVolumes.size());
+    assertTrue(actualVolumes.containsAll(expectedVolumes));
   }
 
   @Test(timeout = 30000)
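
The final hunk replaces the direct list comparison with a size check plus containsAll(), so the verification no longer depends on the order in which the added volumes are reported. A minimal, self-contained sketch of the difference between the two comparison styles (hypothetical example; the class name and sample paths are made up):

import java.util.Arrays;
import java.util.List;

public class OrderInsensitiveCheck {
  public static void main(String[] args) {
    List<String> expected = Arrays.asList("/data/newData0", "/data/newData1");
    List<String> actual   = Arrays.asList("/data/newData1", "/data/newData0");

    // List.equals() is order-sensitive, so this prints false even though the members match.
    System.out.println(expected.equals(actual));

    // A size check plus containsAll() verifies the same membership regardless of order.
    System.out.println(actual.size() == expected.size() && actual.containsAll(expected));
  }
}
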