HDFS-10180. Ozone: Refactor container Namespace. Contributed by Anu Engineer.

Chris Nauroth 2016-03-18 13:46:56 -07:00 committed by Owen O'Malley
parent 044cc47145
commit 4009b66090
17 changed files with 69 additions and 34 deletions


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.helpers;
+package org.apache.hadoop.ozone.container.common.helpers;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.helpers;
+package org.apache.hadoop.ozone.container.common.helpers;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.helpers;
+package org.apache.hadoop.ozone.container.common.helpers;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;


@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.helpers;
+package org.apache.hadoop.ozone.container.common.helpers;
/**
Contains protocol buffer helper classes.
**/


@@ -16,18 +16,18 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.ozoneimpl;
+package org.apache.hadoop.ozone.container.common.impl;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.Type;
-import org.apache.hadoop.ozone.container.helpers.ContainerData;
-import org.apache.hadoop.ozone.container.helpers.ContainerUtils;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
-import org.apache.hadoop.ozone.container.interfaces.ContainerDispatcher;
-import org.apache.hadoop.ozone.container.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -61,7 +61,8 @@ public class Dispatcher implements ContainerDispatcher {
if ((cmdType == Type.CreateContainer) ||
(cmdType == Type.DeleteContainer) ||
(cmdType == Type.ReadContainer) ||
-(cmdType == Type.ListContainer)) {
+(cmdType == Type.ListContainer) ||
+(cmdType == Type.UpdateContainer)) {
return containerProcessHandler(msg);
}
@@ -97,6 +98,9 @@ public class Dispatcher implements ContainerDispatcher {
case ListContainer:
return ContainerUtils.unsupportedRequest(msg);
+case UpdateContainer:
+return ContainerUtils.unsupportedRequest(msg);
case ReadContainer:
return handleReadContainer(msg, cData);
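
The two Dispatcher hunks above capture the only behavioral change in this commit: UpdateContainer joins the container-level commands routed to containerProcessHandler, where (like ListContainer) it is still answered with ContainerUtils.unsupportedRequest. A minimal sketch of that routing follows; it uses simplified stand-ins (a DispatcherSketch class, a local Type enum, and String return values) in place of the generated ContainerProtos request/response types, so it illustrates the branching only and is not code from this commit.

// Illustrative sketch only -- not part of the commit above. It mirrors the
// routing logic visible in the Dispatcher hunks, with a hypothetical enum
// standing in for ContainerProtos.Type and strings standing in for responses.
class DispatcherSketch {

  enum Type { CreateContainer, DeleteContainer, ReadContainer,
      ListContainer, UpdateContainer, WriteChunk }

  String dispatch(Type cmdType) {
    // Container-level commands route to one handler; this commit adds
    // UpdateContainer to that set.
    if ((cmdType == Type.CreateContainer) ||
        (cmdType == Type.DeleteContainer) ||
        (cmdType == Type.ReadContainer) ||
        (cmdType == Type.ListContainer) ||
        (cmdType == Type.UpdateContainer)) {
      return containerProcessHandler(cmdType);
    }
    return "unsupported command: " + cmdType;
  }

  String containerProcessHandler(Type cmdType) {
    switch (cmdType) {
      case ListContainer:
      case UpdateContainer:
        // Both still answer "unsupported", as in the second hunk above.
        return "unsupportedRequest";
      case ReadContainer:
        return "handleReadContainer";
      default:
        return "unsupportedRequest";
    }
  }
}

In the real class the handlers operate on ContainerCommandRequestProto / ContainerCommandResponseProto messages (imported above); the sketch keeps only the command routing.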


@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.ozoneimpl;
+package org.apache.hadoop.ozone.container.common.impl;
/**
This package contains the Ozone container implementation.


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.interfaces;
+package org.apache.hadoop.ozone.container.common.interfaces;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandRequestProto;


@@ -16,12 +16,12 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.interfaces;
+package org.apache.hadoop.ozone.container.common.interfaces;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.ozone.container.helpers.ContainerData;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
import java.io.IOException;
import java.util.List;


@@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.common;
/**
Common Container Layer. At this layer the abstractions are:
1. Containers - Both data and metadata containers.
2. Keys - Key/Value pairs that live inside a container.
3. Chunks - Keys can be composed of many chunks.
Ozone uses these abstractions to build Volumes, Buckets and Keys.
**/
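
The package description above is the clearest statement of the layering this refactor puts under org.apache.hadoop.ozone.container.common: containers hold key/value pairs, and a key's value is composed of chunks, with Volumes, Buckets and Keys built on top. The sketch below merely restates that containment with hypothetical placeholder classes (ContainerSketch, KeyDataSketch, ChunkInfoSketch and their fields are illustrative names, not types from this commit).

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch only: it restates the Containers -> Keys -> Chunks
// layering described in the package javadoc, not the real Ozone classes.
class ChunkInfoSketch {
  final String chunkName;   // where this chunk's bytes live
  final long offset;        // offset of the chunk within the key's value
  final long length;        // number of bytes in the chunk
  ChunkInfoSketch(String chunkName, long offset, long length) {
    this.chunkName = chunkName;
    this.offset = offset;
    this.length = length;
  }
}

class KeyDataSketch {
  final String keyName;
  // A key's value can be composed of many chunks.
  final List<ChunkInfoSketch> chunks = new ArrayList<>();
  KeyDataSketch(String keyName) {
    this.keyName = keyName;
  }
}

class ContainerSketch {
  final String containerName;
  // A container holds key/value pairs; both data and metadata containers
  // use this same shape.
  final Map<String, KeyDataSketch> keys = new HashMap<>();
  ContainerSketch(String containerName) {
    this.containerName = containerName;
  }
}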


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.client;
+package org.apache.hadoop.ozone.container.common.transport.client;
import com.google.common.base.Preconditions;
import io.netty.bootstrap.Bootstrap;
@@ -30,7 +30,7 @@ import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConfiguration;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


@@ -15,13 +15,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.client;
+package org.apache.hadoop.ozone.container.common.transport.client;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.client;
+package org.apache.hadoop.ozone.container.common.transport.client;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
@@ -24,7 +24,7 @@ import io.netty.handler.codec.protobuf.ProtobufDecoder;
import io.netty.handler.codec.protobuf.ProtobufEncoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;
/**


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.server;
+package org.apache.hadoop.ozone.container.common.transport.server;
import com.google.common.base.Preconditions;
import io.netty.bootstrap.ServerBootstrap;
@@ -28,7 +28,7 @@ import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConfiguration;
-import org.apache.hadoop.ozone.container.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
/**
* Creates a netty server endpoint that acts as the communication layer for


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.server;
+package org.apache.hadoop.ozone.container.common.transport.server;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
@@ -24,7 +24,7 @@ import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
-import org.apache.hadoop.ozone.container.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
/**
* Netty server handlers that respond to Network events.


@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ozone.container.transport.server;
+package org.apache.hadoop.ozone.container.common.transport.server;
import com.google.common.base.Preconditions;
import io.netty.channel.ChannelInitializer;
@@ -26,7 +26,7 @@ import io.netty.handler.codec.protobuf.ProtobufDecoder;
import io.netty.handler.codec.protobuf.ProtobufEncoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
-import org.apache.hadoop.ozone.container.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
/**


@@ -24,7 +24,7 @@ import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos
import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos
.ContainerCommandResponseProto;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
import java.io.IOException;
import java.net.ServerSocket;


@@ -25,11 +25,14 @@ import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos.ContainerComm
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConfiguration;
import org.apache.hadoop.ozone.container.ContainerTestHelper;
-import org.apache.hadoop.ozone.container.helpers.Pipeline;
-import org.apache.hadoop.ozone.container.interfaces.ContainerDispatcher;
-import org.apache.hadoop.ozone.container.interfaces.ContainerManager;
-import org.apache.hadoop.ozone.container.ozoneimpl.Dispatcher;
-import org.apache.hadoop.ozone.container.transport.client.XceiverClient;
+import org.apache.hadoop.ozone.container.common.helpers.Pipeline;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.impl.Dispatcher;
+import org.apache.hadoop.ozone.container.common.transport.client.XceiverClient;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerHandler;
import org.junit.Assert;
import org.junit.Test;