HDDS-378. Remove dependencies between hdds/ozone and hdfs proto files. Contributed by Elek, Marton.

Arpit Agarwal 2018-10-16 10:40:00 -07:00
parent 53e5173bd1
commit 5f0b43fa93
30 changed files with 102 additions and 57 deletions

View File

@@ -218,12 +218,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<param>
${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
-</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
-</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>

View File

@@ -28,7 +28,6 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
package hadoop.hdds;
-import "hdfs.proto";
import "hdds.proto";

View File

@@ -28,7 +28,6 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
package hadoop.hdds;
-import "hdfs.proto";
import "hdds.proto";
/**

View File

@@ -73,15 +73,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
-<param>
-${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
-</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
-</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
-</param>
<param>
${basedir}/../../hadoop-hdds/common/src/main/proto/
</param>
@@ -106,4 +97,4 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</plugin>
</plugins>
</build>
</project>
</project>

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.ozone.client;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import java.util.List;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.client;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.ozone.client.protocol;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.client.*;
import org.apache.hadoop.hdds.client.OzoneQuota;

View File

@@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.net.NetUtils;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.client.rpc;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.retry.RetryPolicy;

View File

@@ -82,12 +82,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<param>
${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
-</param>
-<param>
-${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
-</param>
<param>
${basedir}/../../hadoop-hdds/common/src/main/proto/
</param>

View File

@@ -0,0 +1,64 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.protocol;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.StorageTypeProto;
/**
* Ozone specific storage types.
*/
public enum StorageType {
RAM_DISK,
SSD,
DISK,
ARCHIVE;
public static final StorageType DEFAULT = DISK;
public StorageTypeProto toProto() {
switch (this) {
case DISK:
return StorageTypeProto.DISK;
case SSD:
return StorageTypeProto.SSD;
case ARCHIVE:
return StorageTypeProto.ARCHIVE;
case RAM_DISK:
return StorageTypeProto.RAM_DISK;
default:
throw new IllegalStateException(
"BUG: StorageType not found, type=" + this);
}
}
public static StorageType valueOf(StorageTypeProto type) {
switch (type) {
case DISK:
return DISK;
case SSD:
return SSD;
case ARCHIVE:
return ARCHIVE;
case RAM_DISK:
return RAM_DISK;
default:
throw new IllegalStateException(
"BUG: StorageTypeProto not found, type=" + type);
}
}
}
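For reference, a minimal usage sketch of the new enum. The wrapper class StorageTypeRoundTrip and the printed values are illustrative only, not part of this patch; it simply round-trips a value through the OzoneManagerProtocolProtos.StorageTypeProto that this change introduces, which is what OmBucketArgs and OmBucketInfo now do in place of PBHelperClient.convertStorageType.

import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.StorageTypeProto;

public class StorageTypeRoundTrip {
  public static void main(String[] args) {
    // Convert the Ozone-side enum to its wire representation.
    StorageTypeProto wire = StorageType.SSD.toProto();

    // Convert back; the two enums are kept in sync by the switch statements above.
    StorageType restored = StorageType.valueOf(wire);

    System.out.println(wire + " -> " + restored);            // SSD -> SSD
    System.out.println("default = " + StorageType.DEFAULT);  // default = DISK
  }
}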

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;

View File

@@ -23,7 +23,7 @@ import java.util.Map;
import java.util.stream.Collectors;
import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
@@ -230,8 +230,7 @@ public final class OmBucketArgs implements Auditable {
builder.setIsVersionEnabled(isVersionEnabled);
}
if(storageType != null) {
-builder.setStorageType(
-PBHelperClient.convertStorageType(storageType));
+builder.setStorageType(storageType.toProto());
}
return builder.build();
}
@@ -250,7 +249,7 @@
OMPBHelper::convertOzoneAcl).collect(Collectors.toList()),
bucketArgs.hasIsVersionEnabled() ?
bucketArgs.getIsVersionEnabled() : null,
-bucketArgs.hasStorageType() ? PBHelperClient.convertStorageType(
+bucketArgs.hasStorageType() ? StorageType.valueOf(
bucketArgs.getStorageType()) : null);
}
}

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.ozone.om.helpers;
import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
@@ -230,8 +230,7 @@ public final class OmBucketInfo implements Auditable {
.addAllAcls(acls.stream().map(
OMPBHelper::convertOzoneAcl).collect(Collectors.toList()))
.setIsVersionEnabled(isVersionEnabled)
-.setStorageType(PBHelperClient.convertStorageType(
-storageType))
+.setStorageType(storageType.toProto())
.setCreationTime(creationTime)
.build();
}
@@ -248,7 +247,7 @@
bucketInfo.getAclsList().stream().map(
OMPBHelper::convertOzoneAcl).collect(Collectors.toList()),
bucketInfo.getIsVersionEnabled(),
-PBHelperClient.convertStorageType(
-bucketInfo.getStorageType()), bucketInfo.getCreationTime());
+StorageType.valueOf(bucketInfo.getStorageType()),
+bucketInfo.getCreationTime());
}
}

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.ozone.web.handlers;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.web.utils.JsonUtils;

View File

@@ -34,7 +34,6 @@ Ozone Manager. Ozone Manager manages the namespace for ozone.
This is similar to Namenode for Ozone.
*/
-import "hdfs.proto";
import "hdds.proto";
enum Status {
@@ -170,17 +169,24 @@ message BucketInfo {
required string bucketName = 2;
repeated OzoneAclInfo acls = 3;
required bool isVersionEnabled = 4 [default = false];
-required hadoop.hdfs.StorageTypeProto storageType = 5 [default = DISK];
+required StorageTypeProto storageType = 5 [default = DISK];
required uint64 creationTime = 6;
}
+enum StorageTypeProto {
+DISK = 1;
+SSD = 2;
+ARCHIVE = 3;
+RAM_DISK = 4;
+}
message BucketArgs {
required string volumeName = 1;
required string bucketName = 2;
repeated OzoneAclInfo addAcls = 3;
repeated OzoneAclInfo removeAcls = 4;
optional bool isVersionEnabled = 5;
-optional hadoop.hdfs.StorageTypeProto storageType = 6;
+optional StorageTypeProto storageType = 6;
}
message OzoneAclInfo {
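To illustrate the effect of the proto change above, here is a small, hypothetical sketch; the class name and sample values are mine, and the builder, parseFrom, and getter calls are the standard protobuf-generated API for the messages shown in this hunk. It builds and round-trips a BucketArgs message using the Ozone-local StorageTypeProto, with no reference to hdfs.proto.

import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketArgs;

public class BucketArgsProtoExample {
  public static void main(String[] args) throws Exception {
    // Build a BucketArgs message with the Ozone-local storage type enum.
    BucketArgs request = BucketArgs.newBuilder()
        .setVolumeName("vol1")       // sample values, not from the patch
        .setBucketName("bucket1")
        .setStorageType(StorageType.SSD.toProto())
        .build();

    // Standard protobuf wire-format round trip.
    BucketArgs parsed = BucketArgs.parseFrom(request.toByteArray());
    System.out.println(StorageType.valueOf(parsed.getStorageType())); // SSD
  }
}

Before this patch the storageType fields were typed as hadoop.hdfs.StorageTypeProto, so anything compiling OzoneManagerProtocol had to pull in the HDFS proto files; with the local enum the Ozone protocol definition is self-contained.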

View File

@@ -19,8 +19,8 @@
package org.apache.hadoop.ozone.web;
-import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.web.response.BucketInfo;
import org.apache.hadoop.ozone.OzoneConsts;
import org.junit.Test;

View File

@@ -22,7 +22,7 @@ import com.google.common.collect.Sets;
import com.google.common.primitives.Longs;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.client.rest;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.client.rpc;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;

View File

@@ -17,7 +17,7 @@
package org.apache.hadoop.ozone.om;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.ozone.MiniOzoneCluster;

View File

@@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.om;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
import org.apache.hadoop.ozone.MiniOzoneCluster;

View File

@@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.om;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;

View File

@@ -29,7 +29,7 @@ import java.nio.file.NoSuchFileException;
import java.util.LinkedList;
import java.util.List;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.OzoneRestUtils;
import org.apache.hadoop.ozone.client.rest.OzoneException;

View File

@@ -17,7 +17,7 @@
package org.apache.hadoop.ozone.om;
import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.helpers.OmBucketArgs;

View File

@@ -19,8 +19,8 @@
package org.apache.hadoop.ozone.om;
import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;

View File

@@ -16,7 +16,7 @@
*/
package org.apache.hadoop.ozone.om;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.server.ServerUtils;
import org.apache.hadoop.hdfs.DFSUtil;

View File

@@ -29,9 +29,9 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.client.io.OzoneInputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;

View File

@@ -26,7 +26,7 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.StorageType;
import org.apache.hadoop.ozone.OzoneAcl;
/**