HDDS-39. Ozone: Compile Ozone/HDFS/Cblock protobuf files with proto3 compiler using maven protoc plugin.

Contributed by Mukul Kumar Singh.
Anu Engineer 2018-05-11 11:08:45 -07:00
parent 3a93af731e
commit c1d64d60f6
51 changed files with 291 additions and 286 deletions


@@ -20,18 +20,18 @@ package org.apache.hadoop.hdds.scm;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import io.netty.bootstrap.Bootstrap;
-import io.netty.channel.Channel;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioSocketChannel;
-import io.netty.handler.logging.LogLevel;
-import io.netty.handler.logging.LoggingHandler;
+import org.apache.ratis.shaded.io.netty.bootstrap.Bootstrap;
+import org.apache.ratis.shaded.io.netty.channel.Channel;
+import org.apache.ratis.shaded.io.netty.channel.EventLoopGroup;
+import org.apache.ratis.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import org.apache.ratis.shaded.io.netty.channel.socket.nio.NioSocketChannel;
+import org.apache.ratis.shaded.io.netty.handler.logging.LogLevel;
+import org.apache.ratis.shaded.io.netty.handler.logging.LoggingHandler;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.slf4j.Logger;


@@ -18,13 +18,13 @@
 package org.apache.hadoop.hdds.scm;
 import com.google.common.base.Preconditions;
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.SimpleChannelInboundHandler;
+import org.apache.ratis.shaded.io.netty.channel.Channel;
+import org.apache.ratis.shaded.io.netty.channel.ChannelHandlerContext;
+import org.apache.ratis.shaded.io.netty.channel.SimpleChannelInboundHandler;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;


@@ -17,15 +17,17 @@
  */
 package org.apache.hadoop.hdds.scm;
-import io.netty.channel.ChannelInitializer;
-import io.netty.channel.ChannelPipeline;
-import io.netty.channel.socket.SocketChannel;
-import io.netty.handler.codec.protobuf.ProtobufDecoder;
-import io.netty.handler.codec.protobuf.ProtobufEncoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.apache.ratis.shaded.io.netty.channel.ChannelInitializer;
+import org.apache.ratis.shaded.io.netty.channel.ChannelPipeline;
+import org.apache.ratis.shaded.io.netty.channel.socket.SocketChannel;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf.ProtobufEncoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf
+    .ProtobufVarint32FrameDecoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf
+    .ProtobufVarint32LengthFieldPrepender;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import java.util.concurrent.Semaphore;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hdds.scm;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;


@@ -19,14 +19,15 @@
 package org.apache.hadoop.hdds.scm;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.shaded.com.google.protobuf
+    .InvalidProtocolBufferException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.ratis.RatisHelper;
@@ -37,7 +38,6 @@ import org.apache.ratis.protocol.RaftPeer;
 import org.apache.ratis.rpc.RpcType;
 import org.apache.ratis.rpc.SupportedRpcType;
 import org.apache.ratis.shaded.com.google.protobuf.ByteString;
-import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -211,8 +211,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
   private RaftClientReply sendRequest(ContainerCommandRequestProto request)
       throws IOException {
     boolean isReadOnlyRequest = isReadOnly(request);
-    ByteString byteString =
-        ShadedProtoUtil.asShadedByteString(request.toByteArray());
+    ByteString byteString = request.toByteString();
     LOG.debug("sendCommand {} {}", isReadOnlyRequest, request);
     final RaftClientReply reply = isReadOnlyRequest ?
         getClient().sendReadOnly(() -> byteString) :
@@ -224,8 +223,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
   private CompletableFuture<RaftClientReply> sendRequestAsync(
       ContainerCommandRequestProto request) throws IOException {
     boolean isReadOnlyRequest = isReadOnly(request);
-    ByteString byteString =
-        ShadedProtoUtil.asShadedByteString(request.toByteArray());
+    ByteString byteString = request.toByteString();
     LOG.debug("sendCommandAsync {} {}", isReadOnlyRequest, request);
     return isReadOnlyRequest ? getClient().sendReadOnlyAsync(() -> byteString) :
         getClient().sendAsync(() -> byteString);
@@ -237,7 +235,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
     final RaftClientReply reply = sendRequest(request);
     Preconditions.checkState(reply.isSuccess());
     return ContainerCommandResponseProto.parseFrom(
-        ShadedProtoUtil.asByteString(reply.getMessage().getContent()));
+        reply.getMessage().getContent());
   }
   /**
@@ -257,7 +255,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
         .thenApply(reply -> {
           try {
             return ContainerCommandResponseProto.parseFrom(
-                ShadedProtoUtil.asByteString(reply.getMessage().getContent()));
+                reply.getMessage().getContent());
           } catch (InvalidProtocolBufferException e) {
             throw new CompletionException(e);
           }
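For orientation (not part of the patch itself): once ContainerProtos is generated against the Ratis-shaded protobuf runtime, the bytes handed to Ratis are already the shaded ByteString type, so the copy that ShadedProtoUtil.asByteString made on every reply disappears. A minimal sketch of the simplified round trip, using only calls that appear in this hunk; the variable names are illustrative:

    // request.toByteString() now yields the shaded ByteString Ratis expects.
    ByteString payload = request.toByteString();
    RaftClientReply reply = getClient().sendReadOnly(() -> payload);
    // The regenerated parser accepts the shaded bytes directly, with no copy.
    ContainerCommandResponseProto response =
        ContainerCommandResponseProto.parseFrom(reply.getMessage().getContent());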


@@ -25,8 +25,9 @@ import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.hdds.scm.protocolPB
     .StorageContainerLocationProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .ContainerData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadContainerResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto


@@ -18,12 +18,12 @@
 package org.apache.hadoop.hdds.scm.storage;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.hdds.scm.XceiverClientManager;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadChunkResponseProto;
 import org.apache.hadoop.hdds.client.BlockID;


@@ -18,13 +18,13 @@
 package org.apache.hadoop.hdds.scm.storage;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.hdds.scm.XceiverClientManager;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyValue;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyValue;
 import org.apache.hadoop.hdds.client.BlockID;
 import java.io.IOException;


@@ -78,7 +78,58 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   </dependencies>
   <build>
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>${os-maven-plugin.version}</version>
+      </extension>
+    </extensions>
     <plugins>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <version>${protobuf-maven-plugin.version}</version>
+        <extensions>true</extensions>
+        <configuration>
+          <protocArtifact>
+            com.google.protobuf:protoc:${protobuf-compile.version}:exe:${os.detected.classifier}
+          </protocArtifact>
+          <protoSourceRoot>${basedir}/src/main/proto/</protoSourceRoot>
+          <includes>
+            <include>DatanodeContainerProtocol.proto</include>
+          </includes>
+          <outputDirectory>target/generated-sources/java</outputDirectory>
+          <clearOutputDirectory>false</clearOutputDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <goals>
+              <goal>compile</goal>
+              <goal>test-compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-sources</phase>
+            <configuration>
+              <tasks>
+                <replace token="com.google.protobuf" value="org.apache.ratis.shaded.com.google.protobuf"
+                    dir="target/generated-sources/java/org/apache/hadoop/hdds/protocol/datanode/proto">
+                </replace>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-maven-plugins</artifactId>
@@ -107,7 +158,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
               <directory>${basedir}/src/main/proto</directory>
               <includes>
                 <include>StorageContainerLocationProtocol.proto</include>
-                <include>DatanodeContainerProtocol.proto</include>
                 <include>hdds.proto</include>
                 <include>ScmBlockLocationProtocol.proto</include>
               </includes>
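Taken together, the plugins above form one pipeline: os-maven-plugin detects the platform and sets ${os.detected.classifier}, protobuf-maven-plugin downloads the matching protoc binary and generates ContainerProtos under target/generated-sources/java, and the antrun replace task rewrites every com.google.protobuf reference in those generated sources to the Ratis-shaded package. A rough sketch of what consuming code can then assume (illustrative only, not from the patch):

    import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
    import org.apache.ratis.shaded.com.google.protobuf.ByteString;

    // After the antrun rewrite, the generated classes compile against the
    // shaded runtime, so their serialized form is already a shaded ByteString.
    ByteString bytes = ContainerProtos.ContainerCommandRequestProto
        .getDefaultInstance().toByteString();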


@@ -17,7 +17,7 @@
 package org.apache.hadoop.hdds.client;
 import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 /**


@@ -21,9 +21,9 @@ package org.apache.hadoop.hdds.scm;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;


@@ -20,7 +20,8 @@ package org.apache.hadoop.hdds.scm.client;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
 import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .ContainerData;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import java.io.IOException;


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.scm.container.common.helpers;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import java.io.IOException;


@@ -18,39 +18,41 @@
 package org.apache.hadoop.hdds.scm.storage;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .GetKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .GetKeyResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .GetSmallFileRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .GetSmallFileResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .PutKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .PutSmallFileRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadChunkRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadChunkResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadContainerRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ReadContainerResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .WriteChunkRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyValue;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyValue;
 import org.apache.hadoop.hdds.client.BlockID;
 import java.io.IOException;


@@ -19,7 +19,7 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import java.io.IOException;
 import java.util.Map;


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.ozone.container.common.helpers;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.client.BlockID;
 import java.io.IOException;


@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ratis.shaded.com.google.protobuf;
-/** Utilities for the shaded protobuf in Ratis. */
-public interface ShadedProtoUtil {
-  /**
-   * @param bytes
-   * @return the wrapped shaded {@link ByteString} (no coping).
-   */
-  static ByteString asShadedByteString(byte[] bytes) {
-    return ByteString.wrap(bytes);
-  }
-  /**
-   * @param shaded
-   * @return a {@link com.google.protobuf.ByteString} (require coping).
-   */
-  static com.google.protobuf.ByteString asByteString(ByteString shaded) {
-    return com.google.protobuf.ByteString.copyFrom(
-        shaded.asReadOnlyByteBuffer());
-  }
-}
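The deletion is safe because the conversion this interface performed is no longer needed anywhere: asShadedByteString wrapped without copying, but asByteString copied the full payload on every parse. A sketch of the old cost versus the new direct path, assuming a shaded ByteString named content (hypothetical variable):

    // Old (removed): parsing a reply forced a copy into an unshaded ByteString.
    com.google.protobuf.ByteString unshaded =
        com.google.protobuf.ByteString.copyFrom(content.asReadOnlyByteBuffer());
    // New: the regenerated parser takes the shaded ByteString as-is.
    ContainerCommandResponseProto response =
        ContainerCommandResponseProto.parseFrom(content);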


@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ratis.shaded.com.google.protobuf;
-/**
- * This package contains classes related to the shaded protobuf in Apache Ratis.
- */


@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used to transfer data
 // to and from the datanode.
-option java_package = "org.apache.hadoop.hdds.protocol.proto";
+option java_package = "org.apache.hadoop.hdds.protocol.datanode.proto";
 option java_outer_classname = "ContainerProtos";
 option java_generate_equals_and_hash = true;
 package hadoop.hdds.datanode;


@@ -18,12 +18,12 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl;
@@ -40,22 +40,22 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.concurrent.ExecutionException;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CHECKSUM_MISMATCH;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_INTERNAL_ERROR;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_NOT_FOUND;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .INVALID_WRITE_SIZE;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .IO_EXCEPTION;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .OVERWRITE_FLAG_REQUIRED;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNABLE_TO_FIND_CHUNK;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNABLE_TO_FIND_DATA_DIR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CHECKSUM_MISMATCH;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_NOT_FOUND;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.INVALID_WRITE_SIZE;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.IO_EXCEPTION;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.OVERWRITE_FLAG_REQUIRED;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNABLE_TO_FIND_CHUNK;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNABLE_TO_FIND_DATA_DIR;
 /**
  * Set of utility functions used by the chunk Manager.


@@ -20,8 +20,8 @@ package org.apache.hadoop.ozone.container.common.helpers;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerLifeCycleState;
 import org.apache.hadoop.ozone.OzoneConsts;


@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.container.common.helpers;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;


@@ -25,7 +25,7 @@ import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
@@ -42,9 +42,9 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import static org.apache.commons.io.FilenameUtils.removeExtension;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result
     .INVALID_ARGUMENT;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result
     .UNABLE_TO_FIND_DATA_DIR;
 import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION;


@@ -18,8 +18,8 @@
 package org.apache.hadoop.ozone.container.common.helpers;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 /**
  * File Utils are helper routines used by putSmallFile and getSmallFile


@@ -21,17 +21,17 @@ import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
 import org.apache.hadoop.utils.MetadataStore;
 import java.io.IOException;
 import java.nio.charset.Charset;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .NO_SUCH_KEY;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNABLE_TO_READ_METADATA_DB;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.NO_SUCH_KEY;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNABLE_TO_READ_METADATA_DB;
 /**
  * Utils functions to help key functions.


@@ -21,7 +21,7 @@ import com.google.common.base.Preconditions;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
@@ -40,10 +40,10 @@ import java.nio.file.StandardCopyOption;
 import java.security.NoSuchAlgorithmException;
 import java.util.concurrent.ExecutionException;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_INTERNAL_ERROR;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNSUPPORTED_REQUEST;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNSUPPORTED_REQUEST;
 /**
  * An implementation of ChunkManager that is used by default in ozone.


@@ -27,7 +27,7 @@ import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
 import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos;
 import org.apache.hadoop.hdds.protocol.proto
@@ -79,26 +79,26 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.stream.Collectors;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_EXISTS;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_INTERNAL_ERROR;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CONTAINER_NOT_FOUND;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .ERROR_IN_COMPACT_DB;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .INVALID_CONFIG;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .IO_EXCEPTION;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .NO_SUCH_ALGORITHM;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNABLE_TO_READ_METADATA_DB;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNCLOSED_CONTAINER_IO;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .UNSUPPORTED_REQUEST;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_EXISTS;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CONTAINER_NOT_FOUND;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.ERROR_IN_COMPACT_DB;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.INVALID_CONFIG;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.IO_EXCEPTION;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.NO_SUCH_ALGORITHM;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNABLE_TO_READ_METADATA_DB;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNCLOSED_CONTAINER_IO;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.UNSUPPORTED_REQUEST;
 import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION;
 /**


@@ -19,16 +19,16 @@
 package org.apache.hadoop.ozone.container.common.impl;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkUtils;
@@ -48,14 +48,14 @@ import java.security.NoSuchAlgorithmException;
 import java.util.LinkedList;
 import java.util.List;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .CLOSED_CONTAINER_IO;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .GET_SMALL_FILE_ERROR;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .NO_SUCH_ALGORITHM;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .PUT_SMALL_FILE_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.CLOSED_CONTAINER_IO;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.GET_SMALL_FILE_ERROR;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.NO_SUCH_ALGORITHM;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.PUT_SMALL_FILE_ERROR;
 /**
  * Ozone Container dispatcher takes a call from the netty server and routes it


@@ -23,7 +23,7 @@ import com.google.common.primitives.Longs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
 import org.apache.hadoop.ozone.container.common.helpers.KeyData;
@@ -40,8 +40,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
-    .NO_SUCH_KEY;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .Result.NO_SUCH_KEY;
 /**
  * Key Manager impl.


@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.container.common.interfaces;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;


@@ -18,9 +18,9 @@
 package org.apache.hadoop.ozone.container.common.interfaces;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 /**


@@ -19,13 +19,14 @@
 package org.apache.hadoop.ozone.container.common.statemachine.background;
 import com.google.common.collect.Lists;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.shaded.com.google.protobuf
+    .InvalidProtocolBufferException;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .StorageContainerException;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;


@@ -19,13 +19,14 @@
 package org.apache.hadoop.ozone.container.common.transport.server;
 import com.google.common.base.Preconditions;
-import io.netty.bootstrap.ServerBootstrap;
-import io.netty.channel.Channel;
-import io.netty.channel.EventLoopGroup;
-import io.netty.channel.nio.NioEventLoopGroup;
-import io.netty.channel.socket.nio.NioServerSocketChannel;
-import io.netty.handler.logging.LogLevel;
-import io.netty.handler.logging.LoggingHandler;
+import org.apache.ratis.shaded.io.netty.bootstrap.ServerBootstrap;
+import org.apache.ratis.shaded.io.netty.channel.Channel;
+import org.apache.ratis.shaded.io.netty.channel.EventLoopGroup;
+import org.apache.ratis.shaded.io.netty.channel.nio.NioEventLoopGroup;
+import org.apache.ratis.shaded.io.netty.channel.socket.nio
+    .NioServerSocketChannel;
+import org.apache.ratis.shaded.io.netty.handler.logging.LogLevel;
+import org.apache.ratis.shaded.io.netty.handler.logging.LoggingHandler;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;


@@ -18,11 +18,11 @@
 package org.apache.hadoop.ozone.container.common.transport.server;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.SimpleChannelInboundHandler;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.ratis.shaded.io.netty.channel.ChannelHandlerContext;
+import org.apache.ratis.shaded.io.netty.channel.SimpleChannelInboundHandler;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
 import org.slf4j.Logger;


@@ -19,14 +19,16 @@
 package org.apache.hadoop.ozone.container.common.transport.server;
 import com.google.common.base.Preconditions;
-import io.netty.channel.ChannelInitializer;
-import io.netty.channel.ChannelPipeline;
-import io.netty.channel.socket.SocketChannel;
-import io.netty.handler.codec.protobuf.ProtobufDecoder;
-import io.netty.handler.codec.protobuf.ProtobufEncoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
-import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.ratis.shaded.io.netty.channel.ChannelInitializer;
+import org.apache.ratis.shaded.io.netty.channel.ChannelPipeline;
+import org.apache.ratis.shaded.io.netty.channel.socket.SocketChannel;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf.ProtobufDecoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf.ProtobufEncoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf
+    .ProtobufVarint32FrameDecoder;
+import org.apache.ratis.shaded.io.netty.handler.codec.protobuf
+    .ProtobufVarint32LengthFieldPrepender;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
 import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;


@@ -19,13 +19,14 @@
 package org.apache.hadoop.ozone.container.common.transport.server.ratis;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.ratis.shaded.com.google.protobuf
+    .InvalidProtocolBufferException;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .WriteChunkRequestProto;
 import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
 import org.apache.ratis.conf.RaftProperties;
@@ -34,7 +35,6 @@ import org.apache.ratis.protocol.RaftClientRequest;
 import org.apache.ratis.protocol.RaftPeerId;
 import org.apache.ratis.server.storage.RaftStorage;
 import org.apache.ratis.shaded.com.google.protobuf.ByteString;
-import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil;
 import org.apache.ratis.shaded.proto.RaftProtos.LogEntryProto;
 import org.apache.ratis.shaded.proto.RaftProtos.SMLogEntryProto;
 import org.apache.ratis.statemachine.StateMachineStorage;
@@ -159,8 +159,8 @@ public class ContainerStateMachine extends BaseStateMachine {
           .build();
       log = SMLogEntryProto.newBuilder()
-          .setData(getShadedByteString(commitContainerCommandProto))
-          .setStateMachineData(getShadedByteString(dataContainerCommandProto))
+          .setData(commitContainerCommandProto.toByteString())
+          .setStateMachineData(dataContainerCommandProto.toByteString())
           .build();
     } else if (proto.getCmdType() == ContainerProtos.Type.CreateContainer) {
       log = SMLogEntryProto.newBuilder()
@@ -175,21 +175,16 @@ public class ContainerStateMachine extends BaseStateMachine {
     return new TransactionContextImpl(this, request, log);
   }
-  private ByteString getShadedByteString(ContainerCommandRequestProto proto) {
-    return ShadedProtoUtil.asShadedByteString(proto.toByteArray());
-  }
   private ContainerCommandRequestProto getRequestProto(ByteString request)
       throws InvalidProtocolBufferException {
-    return ContainerCommandRequestProto.parseFrom(
-        ShadedProtoUtil.asByteString(request));
+    return ContainerCommandRequestProto.parseFrom(request);
   }
   private Message runCommand(ContainerCommandRequestProto requestProto) {
     LOG.trace("dispatch {}", requestProto);
     ContainerCommandResponseProto response = dispatcher.dispatch(requestProto);
     LOG.trace("response {}", response);
-    return () -> ShadedProtoUtil.asShadedByteString(response.toByteArray());
+    return () -> response.toByteString();
   }
   private CompletableFuture<Message> handleWriteChunk(
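Same simplification on the server side: the state machine returns the shaded bytes straight to Ratis. The lambda form works because Ratis's Message, as used throughout this patch, is a single-method interface whose getContent() returns the shaded ByteString; a minimal sketch with an illustrative response variable:

    // toByteString() already produces the shaded type, so no ShadedProtoUtil.
    Message reply = () -> response.toByteString();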


@@ -26,8 +26,9 @@ import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
+    .ContainerData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerLifeCycleState;
 import java.io.IOException;


@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.client.io;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.Seekable;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;


@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.client.io;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.fs.FSExceptionMessages;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;


@@ -19,7 +19,7 @@
 package org.apache.hadoop.ozone.client.io;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyData;
 import org.apache.hadoop.hdds.client.BlockID;


@@ -19,19 +19,19 @@
 package org.apache.hadoop.ozone.container;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
-import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyValue;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyValue;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConsts;

View File

@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.scm.TestUtils; import org.apache.hadoop.hdds.scm.TestUtils;
import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdfs.server.datanode.StorageLocation; import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;

View File

@ -22,7 +22,7 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdfs.server.datanode.StorageLocation; import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@ -36,9 +36,7 @@ import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
import org.apache.hadoop.ozone.container.common.helpers.KeyData; import org.apache.hadoop.ozone.container.common.helpers.KeyData;
import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStore;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -77,7 +75,7 @@ import static org.apache.hadoop.ozone.container.ContainerTestHelper.getChunk;
import static org.apache.hadoop.ozone.container.ContainerTestHelper.getData; import static org.apache.hadoop.ozone.container.ContainerTestHelper.getData;
import static org.apache.hadoop.ozone.container.ContainerTestHelper import static org.apache.hadoop.ozone.container.ContainerTestHelper
.setDataChecksum; .setDataChecksum;
import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.Stage.COMBINED; .Stage.COMBINED;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;

View File

@ -26,9 +26,11 @@ import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto; .ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandResponseProto;
import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.container.ContainerTestHelper; import org.apache.hadoop.ozone.container.ContainerTestHelper;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.ozone.container.ozoneimpl; package org.apache.hadoop.ozone.container.ozoneimpl;
import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@ -29,7 +29,6 @@ import org.apache.hadoop.hdds.scm.XceiverClient;
import org.apache.hadoop.hdds.scm.XceiverClientSpi; import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;

View File

@ -18,11 +18,13 @@
package org.apache.hadoop.ozone.container.server; package org.apache.hadoop.ozone.container.server;
import io.netty.channel.embedded.EmbeddedChannel; import org.apache.ratis.shaded.io.netty.channel.embedded.EmbeddedChannel;
import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto; .ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.scm.TestUtils; import org.apache.hadoop.hdds.scm.TestUtils;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.ozone.scm; package org.apache.hadoop.ozone.scm;
import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.MiniOzoneCluster;

View File

@ -26,24 +26,22 @@ import java.util.ArrayList;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto; .ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.container.ContainerTestHelper; import org.apache.hadoop.ozone.container.ContainerTestHelper;
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
import org.apache.hadoop.hdds.scm.XceiverClientManager; import org.apache.hadoop.hdds.scm.XceiverClientManager;
import org.apache.hadoop.hdds.scm.XceiverClientMetrics; import org.apache.hadoop.hdds.scm.XceiverClientMetrics;
import org.apache.hadoop.hdds.scm.XceiverClientSpi; import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
import org.apache.hadoop.ozone.container.common.helpers.KeyData; import org.apache.hadoop.ozone.container.common.helpers.KeyData;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.ozone.genesis; package org.apache.hadoop.ozone.genesis;
import com.google.protobuf.ByteString; import org.apache.ratis.shaded.com.google.protobuf.ByteString;
import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang.RandomStringUtils;
@ -53,15 +53,22 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.CreateContainerRequestProto; .ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ReadChunkRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.WriteChunkRequestProto; .CreateContainerRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto; .ReadChunkRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.WriteChunkRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.PutKeyRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.GetKeyRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerData;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;

View File

@ -104,6 +104,11 @@
<mssql.version>6.2.1.jre7</mssql.version> <mssql.version>6.2.1.jre7</mssql.version>
<okhttp.version>2.7.5</okhttp.version> <okhttp.version>2.7.5</okhttp.version>
<!-- Maven protoc compiler -->
<protobuf-maven-plugin.version>0.5.1</protobuf-maven-plugin.version>
<protobuf-compile.version>3.1.0</protobuf-compile.version>
<os-maven-plugin.version>1.5.0.Final</os-maven-plugin.version>
<!-- define the Java language version used by the compiler --> <!-- define the Java language version used by the compiler -->
<javac.version>1.8</javac.version> <javac.version>1.8</javac.version>
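
The three new properties above are the knobs for the proto3 toolchain: a protoc-compiling Maven plugin resolves a prebuilt protoc 3.1.0 binary from Maven Central, and os-maven-plugin supplies the ${os.detected.classifier} needed to pick the right binary for the build platform. A minimal sketch of how a module pom would typically consume these properties (assuming the org.xolstice protobuf-maven-plugin, which the property names and the os-maven-plugin pairing suggest; the per-module wiring in this commit may differ):

<build>
  <extensions>
    <!-- Detects the build platform and exposes ${os.detected.classifier}
         (e.g. linux-x86_64) so the matching protoc binary is fetched. -->
    <extension>
      <groupId>kr.motd.maven</groupId>
      <artifactId>os-maven-plugin</artifactId>
      <version>${os-maven-plugin.version}</version>
    </extension>
  </extensions>
  <plugins>
    <plugin>
      <groupId>org.xolstice.maven.plugins</groupId>
      <artifactId>protobuf-maven-plugin</artifactId>
      <version>${protobuf-maven-plugin.version}</version>
      <configuration>
        <!-- Download protoc from Maven Central instead of requiring a
             locally installed compiler. -->
        <protocArtifact>com.google.protobuf:protoc:${protobuf-compile.version}:exe:${os.detected.classifier}</protocArtifact>
      </configuration>
      <executions>
        <execution>
          <goals>
            <goal>compile</goal>
            <goal>test-compile</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>

This build change is what drives the import churn throughout the diff: the datanode container protocol is now generated under org.apache.hadoop.hdds.protocol.datanode.proto, and client and test code switches to the Ratis-shaded Netty and protobuf classes so the proto3-generated messages can coexist with Hadoop's existing proto2-based protocols (note that the HddsProtos imports keep their old org.apache.hadoop.hdds.protocol.proto package).
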
@ -413,7 +418,7 @@
<version>${hadoop.version}</version> <version>${hadoop.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-applications-distributedshell</artifactId> <artifactId>hadoop-yarn-applications-distributedshell</artifactId>
<version>${hadoop.version}</version> <version>${hadoop.version}</version>
@ -1737,8 +1742,8 @@
</ignores> </ignores>
</configuration> </configuration>
</plugin> </plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>maven-antrun-plugin</artifactId>
<executions> <executions>
<execution> <execution>