HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)

commit 7f49537ba1
parent a7bcc95358
Author: arp (committed by arp7)
Date:   2014-08-27 09:03:45 -07:00
6 changed files with 21 additions and 6 deletions

@@ -2,3 +2,7 @@
 HDFS-6921. Add LazyPersist flag to FileStatus. (Arpit Agarwal)
+
+HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)

@@ -32,7 +32,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Unstable
 public enum StorageType {
   DISK,
-  SSD;
+  SSD,
+  RAM_DISK;
 
   public static final StorageType DEFAULT = DISK;
   public static final StorageType[] EMPTY_ARRAY = {};
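
For orientation, here is a minimal usage sketch (not part of the commit) of how the enum behaves once RAM_DISK is added; it assumes StorageType still lives in the org.apache.hadoop.hdfs package on this branch:

import org.apache.hadoop.hdfs.StorageType;  // package location is an assumption for this branch

public class StorageTypeSketch {
  public static void main(String[] args) {
    // The new constant parses like any other enum value.
    StorageType ramDisk = StorageType.valueOf("RAM_DISK");
    System.out.println(ramDisk);                       // RAM_DISK

    // The default storage type is unchanged by this patch.
    System.out.println(StorageType.DEFAULT);           // DISK

    // values() now reports three media types: DISK, SSD, RAM_DISK.
    System.out.println(StorageType.values().length);   // 3
  }
}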

@@ -1703,6 +1703,8 @@ public class PBHelper {
       return StorageTypeProto.DISK;
     case SSD:
       return StorageTypeProto.SSD;
+    case RAM_DISK:
+      return StorageTypeProto.RAM_DISK;
     default:
       throw new IllegalStateException(
           "BUG: StorageType not found, type=" + type);
@@ -1731,6 +1733,8 @@ public class PBHelper {
       return StorageType.DISK;
     case SSD:
       return StorageType.SSD;
+    case RAM_DISK:
+      return StorageType.RAM_DISK;
     default:
       throw new IllegalStateException(
           "BUG: StorageTypeProto not found, type=" + type);

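As a sanity check on the two switches above, here is a hedged round-trip sketch (not part of the commit). It assumes the conversions are exposed as PBHelper.convertStorageType overloads and that the classes sit in the packages named in the imports; both are assumptions, not confirmed by this diff.

import org.apache.hadoop.hdfs.StorageType;                                  // assumed package
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;   // assumed package
import org.apache.hadoop.hdfs.protocolPB.PBHelper;                          // assumed package

public class StorageTypeRoundTrip {
  public static void main(String[] args) {
    // Every enum value, including the new RAM_DISK, must survive the
    // StorageType -> StorageTypeProto -> StorageType round trip, which is
    // why both switch statements have to be extended together.
    for (StorageType type : StorageType.values()) {
      StorageTypeProto proto = PBHelper.convertStorageType(type);   // assumed method name
      if (PBHelper.convertStorageType(proto) != type) {
        throw new IllegalStateException("Round trip failed for " + type);
      }
    }
  }
}
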
@@ -158,6 +158,7 @@ message FsPermissionProto {
 enum StorageTypeProto {
   DISK = 1;
   SSD = 2;
+  RAM_DISK = 3;
 }
 
 /**
@@ -260,7 +261,6 @@ message HdfsFileStatusProto {
   // Optional field for fileId
   optional uint64 fileId = 13 [default = 0]; // default as an invalid id
   optional int32 childrenNum = 14 [default = -1];
-
   // Optional field for file encryption
   optional FileEncryptionInfoProto fileEncryptionInfo = 15;
   optional bool isLazyPersist = 16 [default = false];

@@ -448,13 +448,16 @@ public class TestPBHelper {
         DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2",
             AdminStates.DECOMMISSIONED),
         DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3",
-            AdminStates.NORMAL)
+            AdminStates.NORMAL),
+        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h4",
+            AdminStates.NORMAL),
     };
-    String[] storageIDs = {"s1", "s2", "s3"};
+    String[] storageIDs = {"s1", "s2", "s3", "s4"};
     StorageType[] media = {
         StorageType.DISK,
         StorageType.SSD,
-        StorageType.DISK
+        StorageType.DISK,
+        StorageType.RAM_DISK
     };
     LocatedBlock lb = new LocatedBlock(
         new ExtendedBlock("bp12", 12345, 10, 53),

@@ -44,10 +44,11 @@ public class TestDataDirs {
     File dir1 = new File("/dir1");
     File dir2 = new File("/dir2");
     File dir3 = new File("/dir3");
+    File dir4 = new File("/dir4");
 
     // Verify that a valid string is correctly parsed, and that storage
     // type is not case-sensitive
-    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3";
+    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3,[ram_disk]/dir4";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1);
     locations = DataNode.getStorageLocations(conf);
     assertThat(locations.size(), is(4));
@@ -59,6 +60,8 @@ public class TestDataDirs {
     assertThat(locations.get(2).getUri(), is(dir2.toURI()));
     assertThat(locations.get(3).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(3).getUri(), is(dir3.toURI()));
+    assertThat(locations.get(4).getStorageType(), is(StorageType.RAM_DISK));
+    assertThat(locations.get(4).getUri(), is(dir4.toURI()));
 
     // Verify that an unrecognized storage type result in an exception.
     String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";
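
Finally, a hedged configuration sketch (not part of the commit) showing how the parsing exercised by TestDataDirs would be used to tag a DataNode directory as RAM_DISK; the directory paths below are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class RamDiskDirConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Same "[STORAGE_TYPE]/path" syntax the test parses; the type tag is
    // case-insensitive and falls back to DISK when the prefix is omitted.
    // The /data and /mnt/ramdisk paths are hypothetical examples.
    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
        "[DISK]/data/dfs/dn,[RAM_DISK]/mnt/ramdisk/dfs/dn");
  }
}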