HDDS-71. Send ContainerType to Datanode during container creation. Contributed by Bharat Viswanadham.

Mukul Kumar Singh 2018-05-21 22:57:08 +05:30
parent 73e9120ad7
commit 132a547dea
4 changed files with 51 additions and 0 deletions

View File

@@ -243,6 +243,8 @@ public final class ContainerProtocolCalls {
    ContainerProtos.ContainerData.Builder containerData = ContainerProtos
        .ContainerData.newBuilder();
    containerData.setContainerID(containerID);
    containerData.setContainerType(ContainerProtos.ContainerType
        .KeyValueContainer);
    createRequest.setContainerData(containerData.build());
    String id = client.getPipeline().getLeader().getUuidString();
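
With this change the client helper stamps every create request with KeyValueContainer, so existing callers pick up the new field without modification. A minimal, hedged sketch of a call site, assuming the (client, containerID, traceID) parameter list used on this branch:

    // Sketch only: the parameter list is assumed from this branch. The caller
    // itself does not change; the container type is now filled in inside
    // createContainer() before the request is sent to the datanode.
    ContainerProtocolCalls.createContainer(xceiverClient, containerID, traceID);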

View File

@@ -225,6 +225,12 @@ message ContainerData {
  optional int64 size = 7;
  optional int64 keyCount = 8;
  optional ContainerLifeCycleState state = 9 [default = OPEN];
  optional ContainerType containerType = 10 [default = KeyValueContainer];
  optional string containerDBType = 11;
}

enum ContainerType {
  KeyValueContainer = 1;
}
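
Because containerType is optional with a declared default, a ContainerData message from an older client that never sets the field still resolves to KeyValueContainer on the datanode. A minimal sketch of that behaviour, assuming the generated ContainerProtos classes from this file are on the classpath and containerID is the only required field:

    // Sketch: a message that omits the new field still reports the default.
    ContainerProtos.ContainerData legacy = ContainerProtos.ContainerData.newBuilder()
        .setContainerID(1L)
        .build();
    assert !legacy.hasContainerType();
    assert legacy.getContainerType()
        == ContainerProtos.ContainerType.KeyValueContainer;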

View File

@@ -21,6 +21,8 @@ package org.apache.hadoop.ozone.container.common.helpers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
    .ContainerType;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
    .ContainerLifeCycleState;
import org.apache.hadoop.ozone.OzoneConsts;
@@ -47,6 +49,8 @@ public class ContainerData {
  private long maxSize;
  private long containerID;
  private ContainerLifeCycleState state;
  private ContainerType containerType;
  private String containerDBType;

  /**
   * Constructs a ContainerData Object.
@@ -99,9 +103,26 @@ public class ContainerData {
    if (protoData.hasSize()) {
      data.setMaxSize(protoData.getSize());
    }

    if(protoData.hasContainerType()) {
      data.setContainerType(protoData.getContainerType());
    }

    if(protoData.hasContainerDBType()) {
      data.setContainerDBType(protoData.getContainerDBType());
    }

    return data;
  }

  public String getContainerDBType() {
    return containerDBType;
  }

  public void setContainerDBType(String containerDBType) {
    this.containerDBType = containerDBType;
  }

  /**
   * Returns a ProtoBuf Message from ContainerData.
   *
@@ -141,9 +162,24 @@ public class ContainerData {
      builder.setSize(this.getMaxSize());
    }

    if(this.getContainerType() != null) {
      builder.setContainerType(containerType);
    }

    if(this.getContainerDBType() != null) {
      builder.setContainerDBType(containerDBType);
    }

    return builder.build();
  }

  public void setContainerType(ContainerType containerType) {
    this.containerType = containerType;
  }

  public ContainerType getContainerType() {
    return this.containerType;
  }

  /**
   * Adds metadata.
   */
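
Taken together, the new accessors let ContainerData carry the container type and the metadata-store type from the create request through to the on-disk container metadata. A rough round-trip sketch, assuming the getFromProtBuf(protoData, conf) signature used on this branch and "RocksDB" as an example DB-type string:

    // Rough sketch (the signature and the "RocksDB" literal are assumptions).
    Configuration conf = new Configuration();
    ContainerProtos.ContainerData proto = ContainerProtos.ContainerData.newBuilder()
        .setContainerID(5L)
        .setContainerType(ContainerProtos.ContainerType.KeyValueContainer)
        .setContainerDBType("RocksDB")
        .build();
    ContainerData data = ContainerData.getFromProtBuf(proto, conf);
    // The new getters expose what the client sent; getProtoBufMessage()
    // re-emits both fields only when they are non-null.
    ContainerProtos.ContainerData copy = data.getProtoBufMessage();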

View File

@@ -42,6 +42,7 @@ import org.apache.hadoop.hdds.protocol.proto
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.StorageTypeProto;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
@@ -400,6 +401,12 @@ public class ContainerManagerImpl implements ContainerManager {
          .toString());
      containerData.setContainerPath(containerFile.toString());

      if(containerData.getContainerDBType() == null) {
        String impl = conf.getTrimmed(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL,
            OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_DEFAULT);
        containerData.setContainerDBType(impl);
      }

      ContainerProtos.ContainerData protoData = containerData
          .getProtoBufMessage();
      protoData.writeDelimitedTo(dos);
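
Since the container metadata is written as a length-delimited protobuf record, it can be read back symmetrically; after this change the recovered record also carries the container type and the DB type chosen above. An illustrative sketch (the file and stream names are placeholders, not code from this commit):

    // Read the metadata back the same way it was written with writeDelimitedTo().
    try (FileInputStream in = new FileInputStream(containerMetaFile)) {
      ContainerProtos.ContainerData onDisk =
          ContainerProtos.ContainerData.parseDelimitedFrom(in);
      ContainerProtos.ContainerType type = onDisk.getContainerType();
      String dbType = onDisk.getContainerDBType();
    }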