From 8b832f3c3556ef3f970bac636ef9c70ee9dd260d Mon Sep 17 00:00:00 2001
From: Anu Engineer
Date: Thu, 5 Apr 2018 11:24:39 -0700
Subject: [PATCH] HDFS-13405. Ozone: Rename HDSL to HDDS. Contributed by Ajay Kumar, Elek Marton, Mukul Kumar Singh, Shashikant Banerjee and Anu Engineer.

---
 .../main/resources/assemblies/hadoop-src.xml | 2 +- hadoop-cblock/server/pom.xml | 8 +- .../apache/hadoop/cblock/CBlockManager.java | 14 +- .../org/apache/hadoop/cblock/CblockUtils.java | 4 +- .../cblock/client/CBlockVolumeClient.java | 2 +- .../cblock/jscsiHelper/BlockWriterTask.java | 6 +- ...kClientProtocolClientSideTranslatorPB.java | 2 +- .../jscsiHelper/CBlockIStorageImpl.java | 4 +- .../jscsiHelper/CBlockTargetServer.java | 4 +- .../jscsiHelper/ContainerCacheFlusher.java | 4 +- .../cblock/jscsiHelper/SCSITargetDaemon.java | 20 +- .../cache/impl/AsyncBlockWriter.java | 8 +- .../cache/impl/CBlockLocalCache.java | 4 +- .../cache/impl/SyncBlockReader.java | 8 +- .../cblock/kubernetes/DynamicProvisioner.java | 2 +- .../cblock/meta/ContainerDescriptor.java | 2 +- .../hadoop/cblock/meta/VolumeDescriptor.java | 2 +- .../cblock/proto/MountVolumeResponse.java | 2 +- ...tServerProtocolServerSideTranslatorPB.java | 2 +- .../hadoop/cblock/storage/StorageManager.java | 12 +- .../proto/CBlockClientServerProtocol.proto | 4 +- .../hadoop/cblock/TestBufferManager.java | 12 +- .../hadoop/cblock/TestCBlockReadWrite.java | 20 +- .../hadoop/cblock/TestCBlockServer.java | 4 +- .../cblock/TestCBlockServerPersistence.java | 4 +- .../hadoop/cblock/TestLocalBlockCache.java | 12 +- .../kubernetes/TestDynamicProvisioner.java | 2 +- .../cblock/util/ContainerLookUpService.java | 2 +- .../hadoop/cblock/util/MockStorageClient.java | 24 +- .../apache/hadoop/cblock/cli/CBlockCli.java | 2 +- .../apache/hadoop/cblock/TestCBlockCLI.java | 2 +- .../src/main/bin/hadoop-functions.sh | 4 +- hadoop-dist/pom.xml | 10 +- .../src/main/compose/cblock/docker-config | 2 +- .../src/main/compose/ozone/docker-config | 2 +- {hadoop-hdsl => hadoop-hdds}/client/pom.xml | 12 +- .../hadoop/hdds}/scm/XceiverClient.java | 20 +- .../hdds}/scm/XceiverClientHandler.java | 10 +- .../hdds}/scm/XceiverClientInitializer.java | 6 +- .../hdds}/scm/XceiverClientManager.java | 45 +- .../hdds}/scm/XceiverClientMetrics.java | 4 +- .../hadoop/hdds}/scm/XceiverClientRatis.java | 22 +- .../scm/client/ContainerOperationClient.java | 42 +- .../hdds/scm/client/HddsClientUtils.java | 143 +-- .../hadoop/hdds}/scm/client/package-info.java | 2 +- .../apache/hadoop/hdds}/scm/package-info.java | 2 +- .../hdds}/scm/storage/ChunkInputStream.java | 18 +- .../hdds}/scm/storage/ChunkOutputStream.java | 22 +- .../hdds}/scm/storage/package-info.java | 2 +- .../dev-support/findbugsExcludeFile.xml | 2 +- {hadoop-hdsl => hadoop-hdds}/common/pom.xml | 13 +- .../apache/hadoop/hdds/HddsConfigKeys.java | 6 + .../org/apache/hadoop/hdds/HddsUtils.java | 39 +- .../hadoop/hdds}/client/OzoneQuota.java | 2 +- .../hdds}/client/ReplicationFactor.java | 2 +- .../hadoop/hdds}/client/ReplicationType.java | 2 +- .../hadoop/hdds}/client/package-info.java | 4 +- .../hadoop/hdds}/conf/OzoneConfiguration.java | 13 +- .../hadoop/hdds}/conf/package-info.java | 2 +- .../org/apache/hadoop/hdds}/package-info.java | 4 +- .../hdds}/protocol/DatanodeDetails.java | 14 +- .../hadoop/hdds}/protocol/package-info.java | 4 +- .../hadoop/hdds}/scm/ScmConfigKeys.java | 26 +- .../org/apache/hadoop/hdds}/scm/ScmInfo.java | 2 +- .../hadoop/hdds}/scm/XceiverClientSpi.java | 14 +- .../hadoop/hdds}/scm/client/ScmClient.java | 22 +- 
.../hadoop/hdds}/scm/client/package-info.java | 2 +- .../hdds/scm/container}/ContainerID.java | 2 +- .../common/helpers/AllocatedBlock.java | 2 +- .../common/helpers/ContainerInfo.java | 26 +- .../common/helpers/DeleteBlockResult.java | 5 +- .../container/common/helpers/Pipeline.java | 20 +- .../common/helpers/PipelineChannel.java | 22 +- .../helpers/StorageContainerException.java | 4 +- .../common/helpers/package-info.java | 2 +- .../apache/hadoop/hdds}/scm/package-info.java | 2 +- .../hdds}/scm/protocol/LocatedContainer.java | 2 +- .../protocol/ScmBlockLocationProtocol.java | 16 +- .../hdds}/scm/protocol/ScmLocatedBlock.java | 8 +- .../StorageContainerLocationProtocol.java | 27 +- .../hdds/scm/protocol}/package-info.java | 2 +- ...ocationProtocolClientSideTranslatorPB.java | 46 +- .../ScmBlockLocationProtocolPB.java | 6 +- ...ocationProtocolClientSideTranslatorPB.java | 68 +- .../StorageContainerLocationProtocolPB.java | 6 +- .../hdds}/scm/protocolPB/package-info.java | 2 +- .../scm/storage/ContainerProtocolCalls.java | 49 +- .../hdds}/scm/storage/package-info.java | 2 +- .../org/apache/hadoop/ozone/OzoneAcl.java | 0 .../apache/hadoop/ozone/OzoneConfigKeys.java | 7 +- .../org/apache/hadoop/ozone/OzoneConsts.java | 0 .../hadoop/ozone/common/BlockGroup.java | 4 +- .../ozone/common/DeleteBlockGroupResult.java | 8 +- .../InconsistentStorageStateException.java | 6 +- .../apache/hadoop/ozone/common/Storage.java | 15 +- .../hadoop/ozone/common/StorageInfo.java | 19 +- .../hadoop/ozone/common/package-info.java | 0 .../InvalidStateTransitionException.java | 0 .../common/statemachine/StateMachine.java | 0 .../common/statemachine/package-info.java | 0 .../container/common/helpers/ChunkInfo.java | 8 +- .../container/common/helpers/KeyData.java | 8 +- .../common/helpers/package-info.java | 0 .../org/apache/hadoop/ozone/lease/Lease.java | 0 .../lease/LeaseAlreadyExistException.java | 0 .../ozone/lease/LeaseCallbackExecutor.java | 0 .../hadoop/ozone/lease/LeaseException.java | 0 .../ozone/lease/LeaseExpiredException.java | 0 .../hadoop/ozone/lease/LeaseManager.java | 0 .../LeaseManagerNotRunningException.java | 0 .../ozone/lease/LeaseNotFoundException.java | 0 .../hadoop/ozone/lease/package-info.java | 0 .../org/apache/hadoop/ozone/package-info.java | 0 ...ocationProtocolServerSideTranslatorPB.java | 51 +- ...ocationProtocolServerSideTranslatorPB.java | 72 +- .../hadoop/ozone/protocolPB/package-info.java | 0 .../hadoop/ozone/web/utils/JsonUtils.java | 6 +- .../hadoop/utils/BackgroundService.java | 16 +- .../apache/hadoop/utils/BackgroundTask.java | 0 .../hadoop/utils/BackgroundTaskQueue.java | 0 .../hadoop/utils/BackgroundTaskResult.java | 0 .../apache/hadoop/utils/BatchOperation.java | 0 .../apache/hadoop/utils/EntryConsumer.java | 0 .../org/apache/hadoop/utils/LevelDBStore.java | 10 +- .../hadoop/utils/MetadataKeyFilters.java | 0 .../apache/hadoop/utils/MetadataStore.java | 0 .../hadoop/utils/MetadataStoreBuilder.java | 23 +- .../org/apache/hadoop/utils/RocksDBStore.java | 14 +- .../hadoop/utils/RocksDBStoreMBean.java | 10 +- .../org/apache/hadoop/utils/package-info.java | 0 .../java/org/apache/ratis/RatisHelper.java | 10 +- .../java/org/apache/ratis/package-info.java | 0 .../com/google/protobuf/ShadedProtoUtil.java | 0 .../com/google/protobuf/package-info.java | 0 .../proto/DatanodeContainerProtocol.proto | 6 +- .../main/proto/ScmBlockLocationProtocol.proto | 16 +- .../StorageContainerLocationProtocol.proto | 6 +- .../common/src/main/proto/hdds.proto | 6 +- .../src/main/resources/ozone-default.xml | 
10 +- .../apache/hadoop/hdds}/scm/TestArchive.java | 2 +- .../apache/hadoop/hdds}/scm/package-info.java | 2 +- .../hadoop/ozone/TestMetadataStore.java | 22 +- .../apache/hadoop/ozone/TestOzoneAcls.java | 0 .../hadoop/ozone/common/TestStateMachine.java | 12 +- .../hadoop/ozone/lease/TestLeaseManager.java | 0 .../hadoop/ozone/lease/package-info.java | 0 .../org/apache/hadoop/ozone/package-info.java | 0 .../hadoop/utils/TestRocksDBStoreMBean.java | 2 +- .../container-service/pom.xml | 16 +- .../hadoop/hdds/scm/HddsServerUtil.java | 34 +- .../apache/hadoop/hdds}/scm/VersionInfo.java | 2 +- .../hadoop/ozone/HddsDatanodeService.java | 46 +- .../container/common/helpers/ChunkUtils.java | 39 +- .../common/helpers/ContainerData.java | 22 +- .../common/helpers/ContainerMetrics.java | 10 +- .../common/helpers/ContainerReport.java | 2 +- .../common/helpers/ContainerUtils.java | 21 +- .../DeletedContainerBlocksSummary.java | 2 +- .../container/common/helpers/FileUtils.java | 2 +- .../container/common/helpers/KeyUtils.java | 13 +- .../common/helpers/package-info.java | 0 .../common/impl/ChunkManagerImpl.java | 17 +- .../common/impl/ContainerManagerImpl.java | 1113 +++++++++++++++++ .../impl/ContainerReportManagerImpl.java | 11 +- .../common/impl/ContainerStatus.java | 0 .../common/impl/ContainerStorageLocation.java | 4 +- .../container/common/impl/Dispatcher.java | 25 +- .../container/common/impl/KeyManagerImpl.java | 11 +- ...RandomContainerDeletionChoosingPolicy.java | 15 +- .../common/impl/StorageLocationReport.java | 0 ...rderedContainerDeletionChoosingPolicy.java | 17 +- .../container/common/impl/package-info.java | 0 .../common/interfaces/ChunkManager.java | 7 +- .../ContainerDeletionChoosingPolicy.java | 9 +- .../interfaces/ContainerDispatcher.java | 6 +- .../interfaces/ContainerLocationManager.java | 0 .../ContainerLocationManagerMXBean.java | 0 .../common/interfaces/ContainerManager.java | 17 +- .../interfaces/ContainerReportManager.java | 2 +- .../common/interfaces/KeyManager.java | 5 +- .../common/interfaces/package-info.java | 0 .../ozone/container/common/package-info.java | 0 .../statemachine/DatanodeStateMachine.java | 19 +- .../statemachine/EndpointStateMachine.java | 7 +- .../EndpointStateMachineMBean.java | 0 .../statemachine/SCMConnectionManager.java | 13 +- .../SCMConnectionManagerMXBean.java | 0 .../common/statemachine/StateContext.java | 16 +- .../background/BlockDeletingService.java | 21 +- .../statemachine/background/package-info.java | 0 .../commandhandler/CloseContainerHandler.java | 8 +- .../commandhandler/CommandDispatcher.java | 2 +- .../commandhandler/CommandHandler.java | 2 +- .../ContainerReportHandler.java | 12 +- .../DeleteBlocksCommandHandler.java | 23 +- .../commandhandler/package-info.java | 0 .../common/statemachine/package-info.java | 0 .../common/states/DatanodeState.java | 0 .../states/datanode/InitDatanodeState.java | 16 +- .../states/datanode/RunningDatanodeState.java | 0 .../common/states/datanode/package-info.java | 0 .../endpoint/HeartbeatEndpointTask.java | 12 +- .../states/endpoint/RegisterEndpointTask.java | 7 +- .../states/endpoint/VersionEndpointTask.java | 6 +- .../common/states/endpoint/package-info.java | 0 .../container/common/states/package-info.java | 0 .../transport/server/XceiverServer.java | 8 +- .../server/XceiverServerHandler.java | 10 +- .../server/XceiverServerInitializer.java | 3 +- .../transport/server/XceiverServerSpi.java | 4 +- .../common/transport/server/package-info.java | 0 .../server/ratis/ContainerStateMachine.java | 23 +- 
.../server/ratis/XceiverServerRatis.java | 11 +- .../transport/server/ratis/package-info.java | 0 .../common/utils/ContainerCache.java | 0 .../container/common/utils/package-info.java | 0 .../container/ozoneimpl/OzoneContainer.java | 37 +- .../container/ozoneimpl/package-info.java | 0 .../org/apache/hadoop/ozone/package-info.java | 0 .../StorageContainerDatanodeProtocol.java | 34 +- .../StorageContainerNodeProtocol.java | 12 +- .../ozone/protocol/VersionResponse.java | 4 +- .../commands/CloseContainerCommand.java | 7 +- .../commands/DeleteBlocksCommand.java | 9 +- .../protocol/commands/RegisteredCommand.java | 8 +- .../protocol/commands/ReregisterCommand.java | 9 +- .../ozone/protocol/commands/SCMCommand.java | 4 +- .../commands/SendContainerCommand.java | 6 +- .../ozone/protocol/commands/package-info.java | 0 .../hadoop/ozone/protocol/package-info.java | 2 +- ...atanodeProtocolClientSideTranslatorPB.java | 45 +- .../StorageContainerDatanodeProtocolPB.java | 4 +- ...atanodeProtocolServerSideTranslatorPB.java | 22 +- .../StorageContainerDatanodeProtocol.proto | 12 +- ...sun.jersey.spi.container.ContainerProvider | 0 .../container/common/ContainerTestUtils.java | 4 +- .../ozone/container/common/SCMTestUtils.java | 15 +- .../ozone/container/common/ScmTestMock.java | 31 +- .../common/TestDatanodeStateMachine.java | 26 +- .../BlockDeletingServiceTestImpl.java | 5 +- .../container/testutils/package-info.java | 0 .../framework/README.md | 4 +- .../framework/pom.xml | 14 +- .../hadoop/hdsl/server/BaseHttpServer.java | 6 +- .../hadoop/hdsl/server/ServerUtils.java | 19 +- .../hdsl/server/ServiceRuntimeInfo.java | 2 +- .../hdsl/server/ServiceRuntimeInfoImpl.java | 2 +- .../hadoop/hdsl/server/package-info.java | 4 +- .../src/main/resources/webapps/datanode/dn.js | 0 .../webapps/static/angular-1.6.4.min.js | 0 .../webapps/static/angular-nvd3-1.0.9.min.js | 0 .../webapps/static/angular-route-1.6.4.min.js | 0 .../resources/webapps/static/d3-3.5.17.min.js | 0 .../main/resources/webapps/static/dfs-dust.js | 0 .../webapps/static/nvd3-1.8.5.min.css | 0 .../webapps/static/nvd3-1.8.5.min.css.map | 0 .../webapps/static/nvd3-1.8.5.min.js | 0 .../webapps/static/nvd3-1.8.5.min.js.map | 0 .../main/resources/webapps/static/ozone.css | 0 .../main/resources/webapps/static/ozone.js | 0 .../webapps/static/templates/config.html | 0 .../webapps/static/templates/jvm.html | 0 .../webapps/static/templates/menu.html | 0 .../webapps/static/templates/overview.html | 0 .../webapps/static/templates/rpc-metrics.html | 0 .../hdsl/server/TestBaseHttpServer.java | 5 +- .../src/test/resources/ozone-site.xml | 0 {hadoop-hdsl => hadoop-hdds}/pom.xml | 7 +- .../server-scm/pom.xml | 22 +- .../apache/hadoop/hdds}/scm/SCMMXBean.java | 8 +- .../apache/hadoop/hdds}/scm/SCMStorage.java | 12 +- .../hdds}/scm/StorageContainerManager.java | 196 +-- .../StorageContainerManagerHttpServer.java | 5 +- .../hadoop/hdds}/scm/block/BlockManager.java | 12 +- .../hdds}/scm/block/BlockManagerImpl.java | 59 +- .../hdds}/scm/block/BlockmanagerMXBean.java | 2 +- .../DatanodeDeletedBlockTransactions.java | 16 +- .../hdds}/scm/block/DeletedBlockLog.java | 4 +- .../hdds}/scm/block/DeletedBlockLogImpl.java | 20 +- .../scm/block/SCMBlockDeletingService.java | 24 +- .../hadoop/hdds}/scm/block/package-info.java | 2 +- .../hdds}/scm/container/ContainerMapping.java | 86 +- .../scm/container/ContainerStateManager.java | 46 +- .../hadoop/hdds}/scm/container/Mapping.java | 17 +- .../scm/container/closer/ContainerCloser.java | 14 +- 
.../scm/container/closer/package-info.java | 2 +- .../hdds}/scm/container/package-info.java | 2 +- .../algorithms/ContainerPlacementPolicy.java | 4 +- .../placement/algorithms/SCMCommonPolicy.java | 14 +- .../SCMContainerPlacementCapacity.java | 10 +- .../SCMContainerPlacementRandom.java | 8 +- .../placement/algorithms/package-info.java | 2 +- .../placement/metrics/ContainerStat.java | 9 +- .../placement/metrics/DatanodeMetric.java | 4 +- .../placement/metrics/LongMetric.java | 2 +- .../container/placement/metrics/NodeStat.java | 2 +- .../placement/metrics/SCMMetrics.java | 2 +- .../placement/metrics/SCMNodeMetric.java | 2 +- .../placement/metrics/SCMNodeStat.java | 2 +- .../placement/metrics/package-info.java | 2 +- .../scm/container/placement/package-info.java | 2 +- .../replication/ContainerSupervisor.java | 32 +- .../container/replication/InProgressPool.java | 27 +- .../container/replication/PeriodicPool.java | 2 +- .../container/replication/package-info.java | 2 +- .../container/states}/ContainerAttribute.java | 7 +- .../scm/container/states}/ContainerState.java | 16 +- .../container/states}/ContainerStateMap.java | 19 +- .../scm/container/states}/package-info.java | 2 +- .../hdds}/scm/exceptions/SCMException.java | 2 +- .../hdds}/scm/exceptions/package-info.java | 2 +- .../hadoop/hdds}/scm/node/CommandQueue.java | 2 +- .../hdds}/scm/node/HeartbeatQueueItem.java | 9 +- .../hadoop/hdds}/scm/node/NodeManager.java | 10 +- .../hdds}/scm/node/NodeManagerMXBean.java | 2 +- .../hdds}/scm/node/NodePoolManager.java | 7 +- .../hadoop/hdds}/scm/node/SCMNodeManager.java | 78 +- .../hdds}/scm/node/SCMNodePoolManager.java | 31 +- .../hadoop/hdds}/scm/node/package-info.java | 2 +- .../apache/hadoop/hdds}/scm/package-info.java | 2 +- .../hdds}/scm/pipelines/PipelineManager.java | 15 +- .../hdds}/scm/pipelines/PipelineSelector.java | 32 +- .../hdds}/scm/pipelines/package-info.java | 2 +- .../scm/pipelines/ratis/RatisManagerImpl.java | 26 +- .../scm/pipelines/ratis/package-info.java | 2 +- .../standalone/StandaloneManagerImpl.java | 29 +- .../pipelines/standalone/package-info.java | 18 + .../hadoop/hdds}/scm/ratis/package-info.java | 2 +- .../src/main/webapps/scm/index.html | 0 .../server-scm/src/main/webapps/scm/main.html | 0 .../src/main/webapps/scm/scm-overview.html | 0 .../server-scm/src/main/webapps/scm/scm.js | 0 .../hadoop/hdds/scm/HddsServerUtilTest.java | 55 +- ...TestStorageContainerManagerHttpServer.java | 3 +- .../apache/hadoop/hdds}/scm/TestUtils.java | 8 +- .../hdds}/scm/block/TestBlockManager.java | 26 +- .../hdds}/scm/block/TestDeletedBlockLog.java | 27 +- .../hdds}/scm/container/MockNodeManager.java | 55 +- .../scm/container/TestContainerMapping.java | 44 +- .../container/closer/TestContainerCloser.java | 49 +- .../states}/TestContainerAttribute.java | 5 +- .../scm/node/TestContainerPlacement.java | 40 +- .../hdds}/scm/node/TestNodeManager.java | 58 +- .../scm/node/TestSCMNodePoolManager.java | 17 +- .../apache/hadoop/hdds}/scm/package-info.java | 2 +- .../ozone/container/common/TestEndPoint.java | 59 +- .../placement/TestContainerPlacement.java | 17 +- .../placement/TestDatanodeMetrics.java | 4 +- .../replication/TestContainerSupervisor.java | 39 +- .../container/replication/package-info.java | 0 .../ReplicationDatanodeStateManager.java | 15 +- .../testutils/ReplicationNodeManagerMock.java | 35 +- .../ReplicationNodePoolManagerMock.java | 6 +- .../container/testutils/package-info.java | 0 {hadoop-hdsl => hadoop-hdds}/tools/pom.xml | 15 +- .../hadoop/hdds}/scm/cli/OzoneBaseCLI.java | 2 
+- .../hdds}/scm/cli/OzoneCommandHandler.java | 4 +- .../hadoop/hdds}/scm/cli/ResultCode.java | 2 +- .../apache/hadoop/hdds}/scm/cli/SCMCLI.java | 39 +- .../cli/container/CloseContainerHandler.java | 16 +- .../container/ContainerCommandHandler.java | 29 +- .../cli/container/CreateContainerHandler.java | 10 +- .../cli/container/DeleteContainerHandler.java | 12 +- .../cli/container/InfoContainerHandler.java | 21 +- .../cli/container/ListContainerHandler.java | 14 +- .../hdds}/scm/cli/container/package-info.java | 2 +- .../hadoop/hdds/scm/cli}/package-info.java | 2 +- .../hadoop/ozone/client/package-info.java | 23 - .../ContainerStates/package-info.java | 22 - .../impl/ContainerLocationManagerImpl.java | 92 -- .../pipelines/standalone/package-info.java | 18 - hadoop-ozone/acceptance-test/README.md | 2 +- .../src/test/compose/docker-config | 2 +- .../hadoop/ozone/client/ObjectStore.java | 11 +- .../hadoop/ozone/client/OzoneBucket.java | 5 +- .../ozone/client/OzoneClientFactory.java | 2 +- .../hadoop/ozone/client/OzoneVolume.java | 14 +- .../client/io/ChunkGroupInputStream.java | 14 +- .../client/io/ChunkGroupOutputStream.java | 22 +- .../client/io/OzoneContainerTranslation.java | 2 +- .../ozone/client/io/OzoneInputStream.java | 2 +- .../ozone/client/protocol/ClientProtocol.java | 6 +- .../hadoop/ozone/client/rest/RestClient.java | 22 +- .../hadoop/ozone/client/rpc/RpcClient.java | 28 +- .../hadoop/ozone/web/client/OzoneBucket.java | 19 +- .../ozone/web/client/OzoneRestClient.java | 12 +- .../hadoop/ozone/web/client/OzoneVolume.java | 6 +- ...entUtils.java => TestHddsClientUtils.java} | 10 +- hadoop-ozone/common/pom.xml | 2 +- hadoop-ozone/common/src/main/bin/oz | 2 +- .../org/apache/hadoop/ozone/KsmUtils.java | 4 +- .../client/rest/response/VolumeInfo.java | 2 +- .../hadoop/ozone/freon/OzoneGetConf.java | 6 +- .../hadoop/ozone/ksm/helpers/KsmKeyArgs.java | 4 +- .../ozone/ksm/helpers/KsmVolumeArgs.java | 2 +- .../hadoop/ozone/ksm/helpers/ServiceInfo.java | 2 +- .../hadoop/ozone/web/utils/OzoneUtils.java | 6 +- .../main/proto/KeySpaceManagerProtocol.proto | 10 +- .../src/main/shellprofile.d/hadoop-ozone.sh | 12 +- .../container/TestContainerStateManager.java | 97 +- .../hadoop/ozone/MiniOzoneClassicCluster.java | 24 +- .../apache/hadoop/ozone/MiniOzoneCluster.java | 4 +- .../hadoop/ozone/MiniOzoneTestHelper.java | 14 +- .../apache/hadoop/ozone/RatisTestHelper.java | 2 +- .../hadoop/ozone/TestContainerOperations.java | 26 +- .../hadoop/ozone/TestMiniOzoneCluster.java | 24 +- .../ozone/TestOzoneConfigurationFields.java | 2 +- .../ozone/TestStorageContainerManager.java | 41 +- .../TestStorageContainerManagerHelper.java | 4 +- .../client/rest/TestOzoneRestClient.java | 8 +- .../ozone/client/rpc/TestOzoneRpcClient.java | 24 +- .../ozone/container/ContainerTestHelper.java | 22 +- .../common/TestBlockDeletingService.java | 8 +- .../TestContainerDeletionChoosingPolicy.java | 8 +- .../common/impl/TestContainerPersistence.java | 12 +- .../TestCloseContainerHandler.java | 18 +- .../metrics/TestContainerMetrics.java | 16 +- .../ozoneimpl/TestOzoneContainer.java | 12 +- .../ozoneimpl/TestOzoneContainerRatis.java | 6 +- .../container/ozoneimpl/TestRatisManager.java | 4 +- .../container/server/TestContainerServer.java | 21 +- .../hadoop/ozone/freon/TestDataValidate.java | 2 +- .../apache/hadoop/ozone/freon/TestFreon.java | 2 +- .../ksm/TestContainerReportWithKeys.java | 16 +- .../hadoop/ozone/ksm/TestKSMMetrcis.java | 2 +- .../hadoop/ozone/ksm/TestKSMSQLCli.java | 4 +- 
.../hadoop/ozone/ksm/TestKeySpaceManager.java | 17 +- .../ksm/TestKeySpaceManagerRestInterface.java | 14 +- .../ozone/ksm/TestKsmBlockVersioning.java | 2 +- .../ksm/TestMultipleContainerReadWrite.java | 4 +- .../hadoop/ozone/ozShell/TestOzoneShell.java | 2 +- .../ozone/scm/TestAllocateContainer.java | 8 +- .../hadoop/ozone/scm/TestContainerSQLCli.java | 39 +- .../ozone/scm/TestContainerSmallFile.java | 30 +- .../apache/hadoop/ozone/scm/TestSCMCli.java | 42 +- .../hadoop/ozone/scm/TestSCMMXBean.java | 7 +- .../hadoop/ozone/scm/TestSCMMetrics.java | 16 +- .../ozone/scm/TestXceiverClientManager.java | 20 +- .../ozone/scm/TestXceiverClientMetrics.java | 18 +- .../hadoop/ozone/scm/node/TestQueryNode.java | 39 +- .../web/TestDistributedOzoneVolumes.java | 2 +- .../ozone/web/TestLocalOzoneVolumes.java | 2 +- .../web/TestOzoneRestWithMiniCluster.java | 2 +- .../hadoop/ozone/web/TestOzoneWebAccess.java | 2 +- .../hadoop/ozone/web/client/TestBuckets.java | 2 +- .../hadoop/ozone/web/client/TestKeys.java | 4 +- .../ozone/web/client/TestOzoneClient.java | 2 +- .../hadoop/ozone/web/client/TestVolume.java | 6 +- .../ozone/web/client/TestVolumeRatis.java | 2 +- .../server/datanode/ObjectStoreHandler.java | 14 +- .../apache/hadoop/ozone/OzoneRestUtils.java | 4 +- .../ozone/web/ObjectStoreRestPlugin.java | 12 +- .../web/handlers/StorageHandlerBuilder.java | 2 +- .../web/netty/ObjectStoreRestHttpServer.java | 36 +- .../storage/DistributedStorageHandler.java | 22 +- hadoop-ozone/ozone-manager/pom.xml | 2 +- .../apache/hadoop/ozone/ksm/KSMMXBean.java | 2 +- .../ozone/ksm/KSMMetadataManagerImpl.java | 4 +- .../apache/hadoop/ozone/ksm/KSMStorage.java | 6 +- .../hadoop/ozone/ksm/KeyDeletingService.java | 2 +- .../hadoop/ozone/ksm/KeyManagerImpl.java | 14 +- .../hadoop/ozone/ksm/KeySpaceManager.java | 46 +- .../ozone/ksm/KeySpaceManagerHttpServer.java | 2 +- .../ozone/ksm/OpenKeyCleanupService.java | 2 +- .../hadoop/ozone/ksm/VolumeManagerImpl.java | 8 +- ...ManagerProtocolServerSideTranslatorPB.java | 14 +- .../hadoop/ozone/web/ozShell/Shell.java | 2 +- .../hadoop/ozone/ksm/TestChunkStreams.java | 2 +- .../ksm/TestKeySpaceManagerHttpServer.java | 2 +- hadoop-ozone/pom.xml | 16 +- hadoop-ozone/tools/pom.xml | 6 +- .../org/apache/hadoop/ozone/freon/Freon.java | 5 +- .../genesis/BenchMarkContainerStateMap.java | 22 +- .../genesis/BenchMarkDatanodeDispatcher.java | 30 +- .../apache/hadoop/ozone/genesis/Genesis.java | 2 +- .../hadoop/ozone/genesis/GenesisUtil.java | 2 +- .../apache/hadoop/ozone/scm/cli/SQLCLI.java | 18 +- .../hadoop/ozone/scm/cli/package-info.java | 2 +- hadoop-project/pom.xml | 16 +- hadoop-tools/hadoop-ozone/pom.xml | 2 +- .../hadoop/fs/ozone/OzoneFileSystem.java | 4 +- .../fs/ozone/TestOzoneFSInputStream.java | 2 +- .../fs/ozone/contract/OzoneContract.java | 2 +- hadoop-tools/hadoop-tools-dist/pom.xml | 2 +- hadoop-tools/pom.xml | 2 +- pom.xml | 14 +- 486 files changed, 3922 insertions(+), 2721 deletions(-) rename {hadoop-hdsl => hadoop-hdds}/client/pom.xml (82%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClient.java (91%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientHandler.java (96%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientInitializer.java (93%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => 
hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientManager.java (89%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientMetrics.java (97%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientRatis.java (93%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/client/ContainerOperationClient.java (91%) rename hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientUtils.java => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java (96%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/client/package-info.java (95%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/package-info.java (96%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/storage/ChunkInputStream.java (96%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/storage/ChunkOutputStream.java (93%) rename {hadoop-hdsl/client/src/main/java/org/apache/hadoop => hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds}/scm/storage/package-info.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/dev-support/findbugsExcludeFile.xml (93%) rename {hadoop-hdsl => hadoop-hdds}/common/pom.xml (93%) create mode 100644 hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java rename hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java (96%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/client/OzoneQuota.java (99%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/client/ReplicationFactor.java (97%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/client/ReplicationType.java (95%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/client/package-info.java (88%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/conf/OzoneConfiguration.java (99%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/conf/package-info.java (95%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/package-info.java (90%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/protocol/DatanodeDetails.java (96%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/protocol/package-info.java (89%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/ScmConfigKeys.java (94%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/ScmInfo.java (98%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/XceiverClientSpi.java (89%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/client/ScmClient.java (86%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/client/package-info.java (95%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container}/ContainerID.java (97%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/AllocatedBlock.java (97%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/ContainerInfo.java (92%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/DeleteBlockResult.java (92%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/Pipeline.java (92%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/PipelineChannel.java (83%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/StorageContainerException.java (96%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/container/common/helpers/package-info.java (93%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/package-info.java (96%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocol/LocatedContainer.java (98%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocol/ScmBlockLocationProtocol.java (89%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocol/ScmLocatedBlock.java (98%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocol/StorageContainerLocationProtocol.java (82%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol}/package-info.java (94%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java (82%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocolPB/ScmBlockLocationProtocolPB.java (92%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java (80%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocolPB/StorageContainerLocationProtocolPB.java (88%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/protocolPB/package-info.java (95%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/storage/ContainerProtocolCalls.java (91%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop => hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds}/scm/storage/package-info.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java (98%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java (96%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java (91%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java (99%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java (98%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/package-info.java 
(100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java (76%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java (73%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java (92%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/hadoop/utils/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/ratis/RatisHelper.java (97%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/ratis/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/proto/DatanodeContainerProtocol.proto (99%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/proto/ScmBlockLocationProtocol.proto (92%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/proto/StorageContainerLocationProtocol.proto (98%) rename hadoop-hdsl/common/src/main/proto/hdsl.proto => hadoop-hdds/common/src/main/proto/hdds.proto (97%) rename {hadoop-hdsl => hadoop-hdds}/common/src/main/resources/ozone-default.xml (99%) rename {hadoop-hdsl/common/src/test/java/org/apache/hadoop => hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds}/scm/TestArchive.java (99%) rename {hadoop-hdsl/common/src/test/java/org/apache/hadoop => hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds}/scm/package-info.java (95%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java (99%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java (97%) rename {hadoop-hdsl => 
hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/ozone/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/pom.xml (87%) rename hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java => hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java (89%) rename {hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds}/scm/VersionInfo.java (98%) rename hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java => hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java (86%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java (91%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java (95%) create mode 100644 hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java (92%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java (95%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java (92%) rename {hadoop-hdsl => 
hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java (94%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java (91%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java (94%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java (88%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java (94%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java (94%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java (98%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java (98%) rename {hadoop-hdsl => 
hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java (89%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java (90%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java (91%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java (91%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java (95%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java (92%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java (100%) rename {hadoop-hdsl => 
hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java (90%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java (68%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java (92%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java (84%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java (91%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java (96%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java (93%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java (85%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java (90%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java (81%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java (100%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java (97%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java (88%) rename {hadoop-hdsl => hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java (95%) rename {hadoop-hdsl => 
hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java (98%)
rename {hadoop-hdsl => hadoop-hdds}/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/README.md (95%)
rename {hadoop-hdsl => hadoop-hdds}/framework/pom.xml (89%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java (97%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java (97%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java (98%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java (97%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java (88%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/datanode/dn.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/angular-1.6.4.min.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/d3-3.5.17.min.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/dfs-dust.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/ozone.css (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/ozone.js (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/templates/config.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/templates/jvm.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/templates/menu.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/templates/overview.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/main/resources/webapps/static/templates/rpc-metrics.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java (96%)
rename {hadoop-hdsl => hadoop-hdds}/framework/src/test/resources/ozone-site.xml (100%)
rename {hadoop-hdsl => hadoop-hdds}/pom.xml (95%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/pom.xml (88%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/SCMMXBean.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/SCMStorage.java (90%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/StorageContainerManager.java (87%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/StorageContainerManagerHttpServer.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/BlockManager.java (87%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/BlockManagerImpl.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/BlockmanagerMXBean.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/DatanodeDeletedBlockTransactions.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/DeletedBlockLog.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/DeletedBlockLogImpl.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/SCMBlockDeletingService.java (90%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/block/package-info.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/ContainerMapping.java (90%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/ContainerStateManager.java (91%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/Mapping.java (87%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/closer/ContainerCloser.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/closer/package-info.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/package-info.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/algorithms/ContainerPlacementPolicy.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/algorithms/SCMCommonPolicy.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/algorithms/SCMContainerPlacementRandom.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/algorithms/package-info.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/ContainerStat.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/DatanodeMetric.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/LongMetric.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/NodeStat.java (96%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/SCMMetrics.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/SCMNodeMetric.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/SCMNodeStat.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/metrics/package-info.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/placement/package-info.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/replication/ContainerSupervisor.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/replication/InProgressPool.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/replication/PeriodicPool.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/container/replication/package-info.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states}/ContainerAttribute.java (97%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states}/ContainerState.java (83%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states}/ContainerStateMap.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states}/package-info.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/exceptions/SCMException.java (98%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/exceptions/package-info.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/CommandQueue.java (99%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/HeartbeatQueueItem.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/NodeManager.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/NodeManagerMXBean.java (97%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/NodePoolManager.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/SCMNodeManager.java (94%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/SCMNodePoolManager.java (91%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/node/package-info.java (96%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/package-info.java (95%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/PipelineManager.java (92%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/PipelineSelector.java (89%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/package-info.java (97%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/ratis/RatisManagerImpl.java (85%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/ratis/package-info.java (93%)
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/pipelines/standalone/StandaloneManagerImpl.java (85%)
create mode 100644 hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java
rename {hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds}/scm/ratis/package-info.java (95%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/main/webapps/scm/index.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/main/webapps/scm/main.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/main/webapps/scm/scm-overview.html (100%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/main/webapps/scm/scm.js (100%)
rename hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/HdslServerUtilTest.java => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java (90%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/TestStorageContainerManagerHttpServer.java (98%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/TestUtils.java (95%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/block/TestBlockManager.java (87%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/block/TestDeletedBlockLog.java (93%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/container/MockNodeManager.java (91%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/container/TestContainerMapping.java (91%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/container/closer/TestContainerCloser.java (83%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/ContainerStates => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states}/TestContainerAttribute.java (96%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/node/TestContainerPlacement.java (84%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/node/TestNodeManager.java (97%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/node/TestSCMNodePoolManager.java (92%)
rename {hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone => hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds}/scm/package-info.java (95%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java (95%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java (90%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java (93%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java (91%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java (100%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java (87%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java (91%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java (95%)
rename {hadoop-hdsl => hadoop-hdds}/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java
(100%) rename {hadoop-hdsl => hadoop-hdds}/tools/pom.xml (84%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/OzoneBaseCLI.java (97%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/OzoneCommandHandler.java (96%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/ResultCode.java (96%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/SCMCLI.java (84%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/CloseContainerHandler.java (84%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/ContainerCommandHandler.java (84%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/CreateContainerHandler.java (90%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/DeleteContainerHandler.java (89%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/InfoContainerHandler.java (85%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/ListContainerHandler.java (89%) rename {hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds}/scm/cli/container/package-info.java (94%) rename {hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol => hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli}/package-info.java (95%) delete mode 100644 hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/package-info.java delete mode 100644 hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates/package-info.java delete mode 100644 hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerLocationManagerImpl.java delete mode 100644 hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/standalone/package-info.java rename hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/{TestOzoneClientUtils.java => TestHddsClientUtils.java} (92%) rename hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/{ozone => hdds}/scm/container/TestContainerStateManager.java (80%) diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml index bafa12b80df..634c526a406 100644 --- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml +++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml @@ -51,7 +51,7 @@ **/file:/** **/SecurityAuth.audit* hadoop-ozone/** - hadoop-hdsl/** + hadoop-hdds/** hadoop-cblock/** diff --git a/hadoop-cblock/server/pom.xml b/hadoop-cblock/server/pom.xml index 3fa197d367a..8039dad0725 100644 --- a/hadoop-cblock/server/pom.xml +++ b/hadoop-cblock/server/pom.xml @@ -37,7 +37,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-server-framework + 
hadoop-hdds-server-framework @@ -47,12 +47,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common org.apache.hadoop - hadoop-hdsl-client + hadoop-hdds-client @@ -132,7 +132,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ - ${basedir}/../../hadoop-hdsl/common/src/main/proto/ + ${basedir}/../../hadoop-hdds/common/src/main/proto/ ${basedir}/src/main/proto diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java index 12b505a90c5..9318b6c8648 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java @@ -37,24 +37,24 @@ import org.apache.hadoop.cblock.protocolPB import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.client.ContainerOperationClient; -import org.apache.hadoop.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.client.ContainerOperationClient; +import org.apache.hadoop.hdds.scm.client.ScmClient; import org.apache.hadoop.cblock.storage.StorageManager; import org.apache.hadoop.cblock.util.KeyUtil; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.utils.LevelDBStore; import static org.apache.hadoop.cblock.CblockUtils.getCblockServerRpcAddr; import static org.apache.hadoop.cblock.CblockUtils.getCblockServiceRpcAddr; -import static org.apache.hadoop.hdsl.server.ServerUtils +import static org.apache.hadoop.hdds.server.ServerUtils .updateRPCListenAddress; import org.iq80.leveldb.DBIterator; import org.slf4j.Logger; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java index 99ffde0b14a..f0f1d05a495 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java @@ -36,8 +36,8 @@ import static org.apache.hadoop.cblock.CBlockConfigKeys .DFS_CBLOCK_SERVICERPC_HOSTNAME_DEFAULT; import static org.apache.hadoop.cblock.CBlockConfigKeys .DFS_CBLOCK_SERVICERPC_PORT_DEFAULT; -import static org.apache.hadoop.hdsl.HdslUtils.getHostNameFromConfigKeys; -import static org.apache.hadoop.hdsl.HdslUtils.getPortNumberFromConfigKeys; +import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys; +import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys; /** * Generic stateless utility functions for CBlock components. 
diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java index 4a3878abf1a..9227a281d68 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java @@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.cblock.CBlockConfigKeys; import org.apache.hadoop.cblock.meta.VolumeInfo; import org.apache.hadoop.cblock.protocolPB.CBlockServiceProtocolPB; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java index 04fe3a4544d..f2d289e907a 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java @@ -21,9 +21,9 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.AsyncBlockWriter; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.LevelDBStore; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java index 84b68e35e0d..0b6c5f31df0 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java @@ -39,7 +39,7 @@ import org.apache.hadoop.cblock.protocolPB.CBlockClientServerProtocolPB; import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.io.Closeable; import java.io.IOException; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java index 2f356688896..4744968b6d7 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java @@ -23,8 +23,8 @@ import org.apache.hadoop.cblock.jscsiHelper.cache.CacheModule; import 
org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.jscsi.target.storage.IStorageModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java index 75e013e4a5c..afbd2606b3a 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java @@ -20,8 +20,8 @@ package org.apache.hadoop.cblock.jscsiHelper; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.cblock.proto.MountVolumeResponse; import org.apache.hadoop.cblock.util.KeyUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.scm.XceiverClientManager; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.XceiverClientManager; import org.jscsi.target.Configuration; import org.jscsi.target.Target; import org.jscsi.target.TargetServer; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java index 292662e6d03..171f3e265ed 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java @@ -26,8 +26,8 @@ import org.apache.hadoop.cblock.jscsiHelper.cache.impl.AsyncBlockWriter; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.DiskBlock; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.LevelDBStore; import org.iq80.leveldb.Options; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java index f164f381347..3806d8ba0fd 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java @@ -31,9 +31,9 @@ import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.client.ContainerOperationClient; +import org.apache.hadoop.hdds.scm.client.ContainerOperationClient; import 
org.apache.hadoop.security.UserGroupInformation; import org.jscsi.target.Configuration; @@ -47,14 +47,14 @@ import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_PORT_DE import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_PORT_KEY; import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_SERVER_ADDRESS_DEFAULT; import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_SERVER_ADDRESS_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_KEY; /** * This class runs the target server process. 
diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java index 992578fa83d..0192c3859f1 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java @@ -22,10 +22,10 @@ import com.google.common.primitives.Longs; import org.apache.commons.codec.digest.DigestUtils; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.LevelDBStore; import org.slf4j.Logger; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java index 1149164f946..ec5a4c98b59 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java @@ -24,8 +24,8 @@ import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher; import org.apache.hadoop.cblock.jscsiHelper.cache.CacheModule; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics; import org.apache.hadoop.utils.LevelDBStore; import org.slf4j.Logger; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java index 7d6e1728d22..557b2010423 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java @@ -22,10 +22,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.codec.digest.DigestUtils; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import 
org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.HadoopThreadPoolExecutor; import org.apache.hadoop.utils.LevelDBStore; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java index 72b215b9b32..e21966b41ee 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java @@ -38,7 +38,7 @@ import org.apache.hadoop.cblock.CblockUtils; import org.apache.hadoop.cblock.exception.CBlockException; import org.apache.hadoop.cblock.proto.MountVolumeResponse; import org.apache.hadoop.cblock.storage.StorageManager; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.ratis.shaded.com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java index 00064a6dc5e..2c312242578 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java @@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.meta; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; /** * diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java index 4f5930d98c1..930741db171 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java @@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.meta; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java index 70ccd90e72f..d33337f1f9e 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.cblock.proto; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.util.HashMap; import java.util.List; diff --git 
a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java index bfe21308221..f937a738c12 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java @@ -25,7 +25,7 @@ import org.apache.hadoop.cblock.proto.MountVolumeResponse; import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos; import org.apache.hadoop.cblock.protocol.proto.CBlockServiceProtocolProtos; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.io.IOException; import java.util.HashMap; diff --git a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java index b9ec4620be5..c6c6a787598 100644 --- a/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java +++ b/hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java @@ -25,10 +25,10 @@ import org.apache.hadoop.cblock.meta.VolumeDescriptor; import org.apache.hadoop.cblock.meta.VolumeInfo; import org.apache.hadoop.cblock.proto.MountVolumeResponse; import org.apache.hadoop.cblock.util.KeyUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.client.ScmClient; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -187,8 +187,8 @@ public class StorageManager { ContainerDescriptor container = null; try { Pipeline pipeline = storageClient.createContainer( - HdslProtos.ReplicationType.STAND_ALONE, - HdslProtos.ReplicationFactor.ONE, + HddsProtos.ReplicationType.STAND_ALONE, + HddsProtos.ReplicationFactor.ONE, KeyUtil.getContainerName(volume.getUserName(), volume.getVolumeName(), containerIdx), cblockId); diff --git a/hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto b/hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto index 160b254d05e..45d0de91e6c 100644 --- a/hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto +++ b/hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto @@ -27,7 +27,7 @@ option java_generic_services = true; option java_generate_equals_and_hash = true; package hadoop.cblock; -import "hdsl.proto"; +import "hdds.proto"; import "CBlockServiceProtocol.proto"; /** * This message is sent from CBlock client side to CBlock server to @@ -69,7 +69,7 @@ message ContainerIDProto { required string containerID = 1; required uint64 index = 2; // making pipeline optional to be compatible with exisiting tests - optional hadoop.hdsl.Pipeline pipeline = 3; + optional hadoop.hdds.Pipeline pipeline = 3; } diff --git 
a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java index e1eb36f2bed..50c4ba88316 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java @@ -23,13 +23,13 @@ import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics; import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.MiniOzoneClassicCluster; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java index d995ba63c79..fb58a4e7c39 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java @@ -24,21 +24,21 @@ import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics; import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ozone.MiniOzoneClassicCluster; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import 
org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Assert; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java index 90fe8023036..386c9b26821 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java @@ -19,8 +19,8 @@ package org.apache.hadoop.cblock; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.cblock.meta.VolumeInfo; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.scm.client.ScmClient; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.client.ScmClient; import org.apache.hadoop.cblock.util.MockStorageClient; import org.junit.After; import org.junit.Before; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java index 1ea183b5e26..db13972e30c 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java @@ -18,8 +18,8 @@ package org.apache.hadoop.cblock; import org.apache.hadoop.cblock.meta.VolumeDescriptor; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.scm.client.ScmClient; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.client.ScmClient; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.cblock.util.MockStorageClient; import org.junit.Test; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java index 6eb7ea67624..e1e2909ac16 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java @@ -25,16 +25,16 @@ import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics; import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher; import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock; import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ozone.MiniOzoneClassicCluster; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import 
org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Time; import org.junit.AfterClass; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java index 8d1a8654c7c..0268ccc4f3d 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java @@ -29,7 +29,7 @@ import org.junit.Test; import java.nio.file.Files; import java.nio.file.Paths; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; /** * Test the resource generation of Dynamic Provisioner. diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java index 8cb57d6d1ea..d7dabe300c3 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java @@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.util; import org.apache.hadoop.cblock.meta.ContainerDescriptor; import org.apache.hadoop.ozone.container.ContainerTestHelper; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.io.IOException; import java.util.concurrent.ConcurrentHashMap; diff --git a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java index 59c8e018ff2..9fa76a859e4 100644 --- a/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java +++ b/hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java @@ -18,12 +18,12 @@ package org.apache.hadoop.cblock.util; import org.apache.hadoop.cblock.meta.ContainerDescriptor; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.client.ScmClient; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.io.IOException; import java.util.ArrayList; @@ -88,7 +88,7 @@ public class MockStorageClient implements ScmClient { ContainerInfo container = new ContainerInfo.Builder() .setContainerName(containerDescriptor.getContainerID()) .setPipeline(containerDescriptor.getPipeline()) - .setState(HdslProtos.LifeCycleState.ALLOCATED) + 
.setState(HddsProtos.LifeCycleState.ALLOCATED) .build(); containerList.add(container); return containerList; @@ -134,8 +134,8 @@ public class MockStorageClient implements ScmClient { } @Override - public Pipeline createContainer(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor replicationFactor, String containerId, + public Pipeline createContainer(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor replicationFactor, String containerId, String owner) throws IOException { int contId = currentContainerId.getAndIncrement(); ContainerLookUpService.addContainer(Long.toString(contId)); @@ -153,8 +153,8 @@ public class MockStorageClient implements ScmClient { * @throws IOException */ @Override - public HdslProtos.NodePool queryNode(EnumSet - nodeStatuses, HdslProtos.QueryScope queryScope, String poolName) + public HddsProtos.NodePool queryNode(EnumSet + nodeStatuses, HddsProtos.QueryScope queryScope, String poolName) throws IOException { return null; } @@ -168,8 +168,8 @@ public class MockStorageClient implements ScmClient { * @throws IOException */ @Override - public Pipeline createReplicationPipeline(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool) + public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool) throws IOException { return null; } diff --git a/hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java b/hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java index 224c908d59f..c6c0e849782 100644 --- a/hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java +++ b/hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java @@ -32,7 +32,7 @@ import org.apache.hadoop.cblock.protocolPB.CBlockServiceProtocolPB; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; diff --git a/hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java b/hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java index f8b05ed6844..a3f53aa605b 100644 --- a/hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java +++ b/hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java @@ -21,7 +21,7 @@ import org.apache.hadoop.cblock.cli.CBlockCli; import org.apache.hadoop.cblock.meta.VolumeDescriptor; import org.apache.hadoop.cblock.util.MockStorageClient; import org.apache.hadoop.conf.OzoneConfiguration; -import org.apache.hadoop.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.client.ScmClient; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.AfterClass; diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh index 59ee7b5ada7..960bc635421 100755 --- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh +++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh @@ -596,8 +596,8 @@ function hadoop_bootstrap YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"} MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"} 
MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"} - HDSL_DIR=${HDSL_DIR:-"share/hadoop/hdsl"} - HDSL_LIB_JARS_DIR=${HDSL_LIB_JARS_DIR:-"share/hadoop/hdsl/lib"} + HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"} + HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"} OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"} OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"} CBLOCK_DIR=${CBLOCK_DIR:-"share/hadoop/cblock"} diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml index 98a9edf709b..300355064d4 100644 --- a/hadoop-dist/pom.xml +++ b/hadoop-dist/pom.xml @@ -219,7 +219,7 @@ - hdsl + hdds false @@ -231,11 +231,11 @@ org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm org.apache.hadoop - hadoop-hdsl-tools + hadoop-hdds-tools org.apache.hadoop @@ -243,7 +243,7 @@ org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service org.apache.hadoop @@ -251,7 +251,7 @@ org.apache.hadoop - hadoop-hdsl-tools + hadoop-hdds-tools org.apache.hadoop diff --git a/hadoop-dist/src/main/compose/cblock/docker-config b/hadoop-dist/src/main/compose/cblock/docker-config index da0c2ace808..4690de0fdc6 100644 --- a/hadoop-dist/src/main/compose/cblock/docker-config +++ b/hadoop-dist/src/main/compose/cblock/docker-config @@ -27,7 +27,7 @@ OZONE-SITE.XML_ozone.scm.client.address=scm OZONE-SITE.XML_dfs.cblock.jscsi.cblock.server.address=cblock OZONE-SITE.XML_dfs.cblock.scm.ipaddress=scm OZONE-SITE.XML_dfs.cblock.service.leveldb.path=/tmp -HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService +HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HddsDatanodeService HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode HDFS-SITE.XML_rpc.metrics.quantile.enable=true diff --git a/hadoop-dist/src/main/compose/ozone/docker-config b/hadoop-dist/src/main/compose/ozone/docker-config index d297b191796..8e5efa961fe 100644 --- a/hadoop-dist/src/main/compose/ozone/docker-config +++ b/hadoop-dist/src/main/compose/ozone/docker-config @@ -27,7 +27,7 @@ HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode HDFS-SITE.XML_rpc.metrics.quantile.enable=true HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300 -HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService +HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HddsDatanodeService LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout diff --git a/hadoop-hdsl/client/pom.xml b/hadoop-hdds/client/pom.xml similarity index 82% rename from hadoop-hdsl/client/pom.xml rename to hadoop-hdds/client/pom.xml index 1f1eaf0e898..95ff09abb05 100644 --- a/hadoop-hdsl/client/pom.xml +++ b/hadoop-hdds/client/pom.xml @@ -19,24 +19,24 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-client + hadoop-hdds-client 3.2.0-SNAPSHOT - Apache Hadoop HDSL Client libraries - Apache Hadoop HDSL Client + Apache Hadoop Distributed Data Store Client libraries + Apache HDDS Client jar - hdsl + hdds true org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided 
diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java similarity index 91% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java index 06b1e998a27..5c702c63b61 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; @@ -28,19 +28,19 @@ import io.netty.channel.socket.nio.NioSocketChannel; import io.netty.handler.logging.LogLevel; import io.netty.handler.logging.LoggingHandler; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.scm.client.HddsClientUtils; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.ozone.client.OzoneClientUtils; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; -import java.util.List; import java.util.concurrent.Semaphore; /** @@ -69,7 +69,7 @@ public class XceiverClient extends XceiverClientSpi { this.pipeline = pipeline; this.config = config; this.semaphore = - new Semaphore(OzoneClientUtils.getMaxOutstandingRequests(config)); + new Semaphore(HddsClientUtils.getMaxOutstandingRequests(config)); } @Override @@ -186,7 +186,7 @@ public class XceiverClient extends XceiverClientSpi { * @return - Stand Alone as the type. */ @Override - public HdslProtos.ReplicationType getPipelineType() { - return HdslProtos.ReplicationType.STAND_ALONE; + public HddsProtos.ReplicationType getPipelineType() { + return HddsProtos.ReplicationType.STAND_ALONE; } } diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java similarity index 96% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java index 4b2d6c44f7b..e2b55ac7e8e 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java @@ -15,19 +15,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.base.Preconditions; - import io.netty.channel.Channel; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandResponseProto; - -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientInitializer.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java similarity index 93% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientInitializer.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java index c70a6862b0d..e10a9f63965 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientInitializer.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelPipeline; @@ -24,8 +24,8 @@ import io.netty.handler.codec.protobuf.ProtobufDecoder; import io.netty.handler.codec.protobuf.ProtobufEncoder; import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import java.util.concurrent.Semaphore; diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientManager.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java similarity index 89% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientManager.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java index 3f62a3a1edc..75851042c29 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientManager.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java @@ -16,33 +16,32 @@ * limitations under the License. 
*/ -package org.apache.hadoop.scm; - -import java.io.Closeable; -import java.io.IOException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.Callable; +package org.apache.hadoop.hdds.scm; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; - import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; -import static org.apache.hadoop.scm.ScmConfigKeys - .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys - .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys - .SCM_CONTAINER_CLIENT_MAX_SIZE_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys +import java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; + +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .SCM_CONTAINER_CLIENT_MAX_SIZE_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos .ReplicationType.RATIS; /** @@ -186,24 +185,24 @@ public class XceiverClientManager implements Closeable { * Returns hard coded 3 as replication factor. * @return 3 */ - public HdslProtos.ReplicationFactor getFactor() { + public HddsProtos.ReplicationFactor getFactor() { if(isUseRatis()) { - return HdslProtos.ReplicationFactor.THREE; + return HddsProtos.ReplicationFactor.THREE; } - return HdslProtos.ReplicationFactor.ONE; + return HddsProtos.ReplicationFactor.ONE; } /** * Returns the default replication type. * @return Ratis or Standalone */ - public HdslProtos.ReplicationType getType() { + public HddsProtos.ReplicationType getType() { // TODO : Fix me and make Ratis default before release. // TODO: Remove this as replication factor and type are pipeline properties if(isUseRatis()) { - return HdslProtos.ReplicationType.RATIS; + return HddsProtos.ReplicationType.RATIS; } - return HdslProtos.ReplicationType.STAND_ALONE; + return HddsProtos.ReplicationType.STAND_ALONE; } /** diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientMetrics.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java similarity index 97% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientMetrics.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java index bcece237172..a61eba142ab 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientMetrics.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java @@ -15,10 +15,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
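The XceiverClientManager hunk above keeps the same logic but now returns HddsProtos enums. A minimal usage sketch; the single-argument constructor is an assumption and is not part of the hunk:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.XceiverClientManager;

    final class ReplicationDefaultsSketch {
      private ReplicationDefaultsSketch() { }

      public static void main(String[] args) {
        // Assumed constructor; the hunk only shows the accessors.
        XceiverClientManager manager = new XceiverClientManager(new Configuration());
        HddsProtos.ReplicationType type = manager.getType();       // RATIS or STAND_ALONE
        HddsProtos.ReplicationFactor factor = manager.getFactor(); // THREE or ONE
        System.out.println(type + " / " + factor);
      }
    }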
*/ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java similarity index 93% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java index 084e3e55a1d..d010c6913f8 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java @@ -16,17 +16,19 @@ * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.base.Preconditions; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.ozone.client.OzoneClientUtils; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.client.HddsClientUtils; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandResponseProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.ratis.RatisHelper; import org.apache.ratis.client.RaftClient; import org.apache.ratis.protocol.RaftClientReply; @@ -60,7 +62,7 @@ public final class XceiverClientRatis extends XceiverClientSpi { ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY, ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT); final int maxOutstandingRequests = - OzoneClientUtils.getMaxOutstandingRequests(ozoneConf); + HddsClientUtils.getMaxOutstandingRequests(ozoneConf); return new XceiverClientRatis(pipeline, SupportedRpcType.valueOfIgnoreCase(rpcType), maxOutstandingRequests); } @@ -98,8 +100,8 @@ public final class XceiverClientRatis extends XceiverClientSpi { * @return - Ratis */ @Override - public HdslProtos.ReplicationType getPipelineType() { - return HdslProtos.ReplicationType.RATIS; + public HddsProtos.ReplicationType getPipelineType() { + return HddsProtos.ReplicationType.RATIS; } private void reinitialize(List datanodes, RaftGroup group) diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/ContainerOperationClient.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java similarity index 91% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/ContainerOperationClient.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java index 
08ddfd69846..8f30a7fad10 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/ContainerOperationClient.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java @@ -15,20 +15,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.client; +package org.apache.hadoop.hdds.scm.client; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ReadContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ReadContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,8 +39,10 @@ import java.util.EnumSet; import java.util.List; import java.util.UUID; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.ALLOCATED; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.OPEN; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState + .ALLOCATED; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState + .OPEN; /** * This class provides the client-facing APIs of container operations. @@ -189,8 +193,8 @@ public class ContainerOperationClient implements ScmClient { * @inheritDoc */ @Override - public Pipeline createContainer(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, + public Pipeline createContainer(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, String containerId, String owner) throws IOException { XceiverClientSpi client = null; try { @@ -229,8 +233,8 @@ public class ContainerOperationClient implements ScmClient { * @throws IOException */ @Override - public HdslProtos.NodePool queryNode(EnumSet - nodeStatuses, HdslProtos.QueryScope queryScope, String poolName) + public HddsProtos.NodePool queryNode(EnumSet + nodeStatuses, HddsProtos.QueryScope queryScope, String poolName) throws IOException { return storageContainerLocationClient.queryNode(nodeStatuses, queryScope, poolName); @@ -240,8 +244,8 @@ public class ContainerOperationClient implements ScmClient { * Creates a specified replication pipeline. 
*/ @Override - public Pipeline createReplicationPipeline(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool) + public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool) throws IOException { return storageContainerLocationClient.createReplicationPipeline(type, factor, nodePool); diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientUtils.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java similarity index 96% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientUtils.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java index c77f9656c4a..bc5f8d65b46 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientUtils.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java @@ -16,7 +16,20 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.client; +package org.apache.hadoop.hdds.scm.client; + +import com.google.common.base.Preconditions; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.ozone.OzoneConfigKeys; +import org.apache.hadoop.ozone.OzoneConsts; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.text.ParseException; import java.time.Instant; @@ -25,21 +38,6 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.ScmConfigKeys; - -import com.google.common.base.Preconditions; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Utility methods for Ozone and Container Clients. * @@ -49,14 +47,14 @@ import org.slf4j.LoggerFactory; */ @InterfaceAudience.Public @InterfaceStability.Unstable -public final class OzoneClientUtils { +public final class HddsClientUtils { private static final Logger LOG = LoggerFactory.getLogger( - OzoneClientUtils.class); + HddsClientUtils.class); private static final int NO_PORT = -1; - private OzoneClientUtils() { + private HddsClientUtils() { } /** @@ -69,55 +67,28 @@ public final class OzoneClientUtils { return format.withZone(ZoneId.of(OzoneConsts.OZONE_TIME_ZONE)); }); + /** - * Returns the cache value to be used for list calls. - * @param conf Configuration object - * @return list cache size + * Convert time in millisecond to a human readable format required in ozone. 
+ * @return a human readable string for the input time */ - public static int getListCacheSize(Configuration conf) { - return conf.getInt(OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE, - OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT); + public static String formatDateTime(long millis) { + ZonedDateTime dateTime = ZonedDateTime.ofInstant( + Instant.ofEpochSecond(millis), DATE_FORMAT.get().getZone()); + return DATE_FORMAT.get().format(dateTime); } /** - * @return a default instance of {@link CloseableHttpClient}. + * Convert time in ozone date format to millisecond. + * @return time in milliseconds */ - public static CloseableHttpClient newHttpClient() { - return OzoneClientUtils.newHttpClient(new OzoneConfiguration()); + public static long formatDateTime(String date) throws ParseException { + Preconditions.checkNotNull(date, "Date string should not be null."); + return ZonedDateTime.parse(date, DATE_FORMAT.get()) + .toInstant().getEpochSecond(); } - /** - * Returns a {@link CloseableHttpClient} configured by given configuration. - * If conf is null, returns a default instance. - * - * @param conf configuration - * @return a {@link CloseableHttpClient} instance. - */ - public static CloseableHttpClient newHttpClient(Configuration conf) { - long socketTimeout = OzoneConfigKeys - .OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT; - long connectionTimeout = OzoneConfigKeys - .OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT; - if (conf != null) { - socketTimeout = conf.getTimeDuration( - OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT, - OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT, - TimeUnit.MILLISECONDS); - connectionTimeout = conf.getTimeDuration( - OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT, - OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT, - TimeUnit.MILLISECONDS); - } - CloseableHttpClient client = HttpClients.custom() - .setDefaultRequestConfig( - RequestConfig.custom() - .setSocketTimeout(Math.toIntExact(socketTimeout)) - .setConnectTimeout(Math.toIntExact(connectionTimeout)) - .build()) - .build(); - return client; - } /** * verifies that bucket name / volume name is a valid DNS name. @@ -199,23 +170,53 @@ public final class OzoneClientUtils { } /** - * Convert time in millisecond to a human readable format required in ozone. - * @return a human readable string for the input time + * Returns the cache value to be used for list calls. + * @param conf Configuration object + * @return list cache size */ - public static String formatDateTime(long millis) { - ZonedDateTime dateTime = ZonedDateTime.ofInstant( - Instant.ofEpochSecond(millis), DATE_FORMAT.get().getZone()); - return DATE_FORMAT.get().format(dateTime); + public static int getListCacheSize(Configuration conf) { + return conf.getInt(OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE, + OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT); } /** - * Convert time in ozone date format to millisecond. - * @return time in milliseconds + * @return a default instance of {@link CloseableHttpClient}. */ - public static long formatDateTime(String date) throws ParseException { - Preconditions.checkNotNull(date, "Date string should not be null."); - return ZonedDateTime.parse(date, DATE_FORMAT.get()) - .toInstant().getEpochSecond(); + public static CloseableHttpClient newHttpClient() { + return HddsClientUtils.newHttpClient(new Configuration()); + } + + /** + * Returns a {@link CloseableHttpClient} configured by given configuration. + * If conf is null, returns a default instance. 
+ * + * @param conf configuration + * @return a {@link CloseableHttpClient} instance. + */ + public static CloseableHttpClient newHttpClient(Configuration conf) { + long socketTimeout = OzoneConfigKeys + .OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT; + long connectionTimeout = OzoneConfigKeys + .OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT; + if (conf != null) { + socketTimeout = conf.getTimeDuration( + OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT, + OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT, + TimeUnit.MILLISECONDS); + connectionTimeout = conf.getTimeDuration( + OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT, + OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT, + TimeUnit.MILLISECONDS); + } + + CloseableHttpClient client = HttpClients.custom() + .setDefaultRequestConfig( + RequestConfig.custom() + .setSocketTimeout(Math.toIntExact(socketTimeout)) + .setConnectTimeout(Math.toIntExact(connectionTimeout)) + .build()) + .build(); + return client; } /** diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java similarity index 95% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/package-info.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java index 9febb0ac14d..73ad78cd787 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/package-info.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.client; +package org.apache.hadoop.hdds.scm.client; /** * Client facing classes for the container operations. diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java similarity index 96% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/package-info.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java index e23763d777c..9390bc10203 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/package-info.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; /** * Classes for different type of container service client. diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkInputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java similarity index 96% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkInputStream.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java index 8c54d654c36..9b8eaa9661f 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkInputStream.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java @@ -16,7 +16,15 @@ * limitations under the License. 
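The former OzoneClientUtils becomes HddsClientUtils under org.apache.hadoop.hdds.scm.client, with the same static helpers reordered. A short usage sketch that only strings together methods visible in the hunks above:

    import java.io.IOException;
    import java.text.ParseException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
    import org.apache.http.impl.client.CloseableHttpClient;

    final class HddsClientUtilsSketch {
      private HddsClientUtilsSketch() { }

      public static void main(String[] args) throws IOException, ParseException {
        Configuration conf = new Configuration();
        // HTTP client configured from the ozone.client.* timeout settings.
        try (CloseableHttpClient client = HddsClientUtils.newHttpClient(conf)) {
          System.out.println(client != null);
        }
        // Round-trip through the ozone date format helpers.
        String formatted = HddsClientUtils.formatDateTime(System.currentTimeMillis());
        long parsedBack = HddsClientUtils.formatDateTime(formatted);
        System.out.println(formatted + " -> " + parsedBack);
      }
    }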
*/ -package org.apache.hadoop.scm.storage; +package org.apache.hadoop.hdds.scm.storage; + +import com.google.protobuf.ByteString; +import org.apache.hadoop.fs.Seekable; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ReadChunkResponseProto; import java.io.EOFException; import java.io.IOException; @@ -25,14 +33,6 @@ import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; -import com.google.protobuf.ByteString; - -import org.apache.hadoop.fs.Seekable; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ReadChunkResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo; -import org.apache.hadoop.scm.XceiverClientSpi; -import org.apache.hadoop.scm.XceiverClientManager; - /** * An {@link InputStream} used by the REST service in combination with the * SCMClient to read the value of a key from a sequence diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkOutputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java similarity index 93% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkOutputStream.java rename to hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java index 52a981f66a5..b65df9f89b0 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkOutputStream.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java @@ -16,24 +16,24 @@ * limitations under the License. */ -package org.apache.hadoop.scm.storage; +package org.apache.hadoop.hdds.scm.storage; -import static org.apache.hadoop.scm.storage.ContainerProtocolCalls.putKey; -import static org.apache.hadoop.scm.storage.ContainerProtocolCalls.writeChunk; +import com.google.protobuf.ByteString; +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.UUID; -import com.google.protobuf.ByteString; - -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.KeyData; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.XceiverClientSpi; +import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls.putKey; +import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls + .writeChunk; /** * An {@link OutputStream} used by the REST service in combination with the diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java similarity index 95% rename from hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/package-info.java rename to 
hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java index b3674619225..6e7ce948784 100644 --- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/package-info.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.storage; +package org.apache.hadoop.hdds.scm.storage; /** * Low level IO streams to upload/download chunks from container service. diff --git a/hadoop-hdsl/common/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml similarity index 93% rename from hadoop-hdsl/common/dev-support/findbugsExcludeFile.xml rename to hadoop-hdds/common/dev-support/findbugsExcludeFile.xml index d93c4a1ebf2..3571a8929e3 100644 --- a/hadoop-hdsl/common/dev-support/findbugsExcludeFile.xml +++ b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml @@ -16,6 +16,6 @@ --> - + diff --git a/hadoop-hdsl/common/pom.xml b/hadoop-hdds/common/pom.xml similarity index 93% rename from hadoop-hdsl/common/pom.xml rename to hadoop-hdds/common/pom.xml index c2323af3a0d..6b2a156a158 100644 --- a/hadoop-hdsl/common/pom.xml +++ b/hadoop-hdds/common/pom.xml @@ -19,22 +19,21 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-common + hadoop-hdds-common 3.2.0-SNAPSHOT - Apache Hadoop HDSL Common utilities - Apache Hadoop HDSL Common + Apache Hadoop Distributed Data Store Common + Apache HDDS Common jar - hdsl + hdds true - org.fusesource.leveldbjni leveldbjni-all @@ -109,7 +108,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> StorageContainerLocationProtocol.proto DatanodeContainerProtocol.proto - hdsl.proto + hdds.proto ScmBlockLocationProtocol.proto diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java new file mode 100644 index 00000000000..665618cf24d --- /dev/null +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java @@ -0,0 +1,6 @@ +package org.apache.hadoop.hdds; + +public class HddsConfigKeys { + private HddsConfigKeys() { + } +} diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java similarity index 96% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java index 6446618f5da..f00f503794f 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java @@ -16,35 +16,34 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hdsl; - -import java.net.InetSocketAddress; - -import java.nio.file.Paths; -import java.util.Collection; -import java.util.HashSet; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.scm.ScmConfigKeys; +package org.apache.hadoop.hdds; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.net.HostAndPort; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED_DEFAULT; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.ozone.OzoneConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.net.InetSocketAddress; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.HashSet; + +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED_DEFAULT; + /** - * HDSL specific stateless utility functions. + * HDDS specific stateless utility functions. */ -public class HdslUtils { +public class HddsUtils { - private static final Logger LOG = LoggerFactory.getLogger(HdslUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HddsUtils.class); /** * The service ID of the solitary Ozone SCM service. @@ -55,7 +54,7 @@ public class HdslUtils { private static final int NO_PORT = -1; - private HdslUtils() { + private HddsUtils() { } /** @@ -233,7 +232,7 @@ public class HdslUtils { return addresses; } - public static boolean isHdslEnabled(Configuration conf) { + public static boolean isHddsEnabled(Configuration conf) { String securityEnabled = conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple"); diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/OzoneQuota.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java similarity index 99% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/OzoneQuota.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java index 032dd60d99d..59708a956b9 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/OzoneQuota.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java @@ -16,7 +16,7 @@ * limitations under the License. 
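HdslUtils becomes HddsUtils under org.apache.hadoop.hdds, and isHdslEnabled is renamed to isHddsEnabled. A small sketch of the renamed check; only the method name and package come from the hunk, the guard around it is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.HddsUtils;

    final class HddsEnabledSketch {
      private HddsEnabledSketch() { }

      static void startDatanodePluginIfEnabled(Configuration conf) {
        if (!HddsUtils.isHddsEnabled(conf)) {
          return; // HDDS/Ozone is not enabled in this configuration.
        }
        // Plugin start-up would go here; omitted in this sketch.
        System.out.println("HDDS is enabled");
      }
    }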
*/ -package org.apache.hadoop.ozone.client; +package org.apache.hadoop.hdds.client; import org.apache.hadoop.ozone.OzoneConsts; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationFactor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java similarity index 97% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationFactor.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java index 971cfec7401..0215964ab8e 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationFactor.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.client; +package org.apache.hadoop.hdds.client; /** * The replication factor to be used while writing key into ozone. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationType.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationType.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java index 537c336e503..259a1a29317 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationType.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.client; +package org.apache.hadoop.hdds.client; /** * The replication type to be used while writing key into ozone. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java similarity index 88% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java index 749283874a7..e81f134b259 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java @@ -16,8 +16,8 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.client; +package org.apache.hadoop.hdds.client; /** - * Base property types for HDSL containers and replications. + * Base property types for HDDS containers and replications. */ \ No newline at end of file diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/OzoneConfiguration.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java similarity index 99% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/OzoneConfiguration.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java index a185156fe24..f07718c0c89 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/OzoneConfiguration.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java @@ -16,11 +16,11 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hdsl.conf; +package org.apache.hadoop.hdds.conf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; @@ -28,8 +28,9 @@ import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configuration; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; /** * Configuration for ozone. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java index 2aa5c2501ce..948057ebba7 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java @@ -15,4 +15,4 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hdsl.conf; \ No newline at end of file +package org.apache.hadoop.hdds.conf; \ No newline at end of file diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java similarity index 90% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java index d24d0fb3368..f8894e6a7e8 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java @@ -16,8 +16,8 @@ * limitations under the License. */ -package org.apache.hadoop.hdsl; +package org.apache.hadoop.hdds; /** - * Generic HDSL specific configurator and helper classes. + * Generic HDDS specific configurator and helper classes. */ \ No newline at end of file diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java similarity index 96% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java index 7049c30aee2..1463591861f 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java @@ -16,12 +16,12 @@ * limitations under the License. 
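OzoneConfiguration moves from org.apache.hadoop.hdsl.conf to org.apache.hadoop.hdds.conf, so callers only need to update the import. A minimal sketch, assuming the no-argument constructor that the removed newHttpClient() overload earlier in this patch was already using:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;

    final class OzoneConfigurationSketch {
      private OzoneConfigurationSketch() { }

      public static void main(String[] args) {
        OzoneConfiguration conf = new OzoneConfiguration();
        // Plain Configuration lookup; "ozone.enabled" is the OZONE_ENABLED key seen above.
        System.out.println(conf.get("ozone.enabled", "false"));
      }
    }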
*/ -package org.apache.hadoop.hdsl.protocol; +package org.apache.hadoop.hdds.protocol; import com.google.common.base.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.util.UUID; @@ -222,7 +222,7 @@ public final class DatanodeDetails implements Comparable { * @return DatanodeDetails */ public static DatanodeDetails getFromProtoBuf( - HdslProtos.DatanodeDetailsProto datanodeDetailsProto) { + HddsProtos.DatanodeDetailsProto datanodeDetailsProto) { DatanodeDetails.Builder builder = newBuilder(); builder.setUuid(datanodeDetailsProto.getUuid()); if (datanodeDetailsProto.hasIpAddress()) { @@ -251,11 +251,11 @@ public final class DatanodeDetails implements Comparable { /** * Returns a DatanodeDetails protobuf message from a datanode ID. - * @return Hdsl.DatanodeDetailsProto + * @return HddsProtos.DatanodeDetailsProto */ - public HdslProtos.DatanodeDetailsProto getProtoBufMessage() { - HdslProtos.DatanodeDetailsProto.Builder builder = - HdslProtos.DatanodeDetailsProto.newBuilder() + public HddsProtos.DatanodeDetailsProto getProtoBufMessage() { + HddsProtos.DatanodeDetailsProto.Builder builder = + HddsProtos.DatanodeDetailsProto.newBuilder() .setUuid(getUuidString()); if (ipAddress != null) { builder.setIpAddress(ipAddress); diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java similarity index 89% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java index 7fd2543b7e1..7dae0fce02c 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java @@ -17,6 +17,6 @@ */ /** - * This package contains HDSL protocol related classes. + * This package contains HDDS protocol related classes. */ -package org.apache.hadoop.hdsl.protocol; \ No newline at end of file +package org.apache.hadoop.hdds.protocol; \ No newline at end of file diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java similarity index 94% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmConfigKeys.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java index 9d153597f44..7f40ab27fea 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmConfigKeys.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
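DatanodeDetails keeps its builder and protobuf conversions, now against HddsProtos. A round-trip sketch; setUuid, getUuidString, getProtoBufMessage and getFromProtoBuf appear in the hunk above, while a public newBuilder()/build() pair is assumed:

    import java.util.UUID;
    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

    final class DatanodeDetailsSketch {
      private DatanodeDetailsSketch() { }

      public static void main(String[] args) {
        DatanodeDetails original = DatanodeDetails.newBuilder()
            .setUuid(UUID.randomUUID().toString())
            .build(); // build() assumed; the hunk only shows the builder's setters in use.
        HddsProtos.DatanodeDetailsProto proto = original.getProtoBufMessage();
        DatanodeDetails restored = DatanodeDetails.getFromProtoBuf(proto);
        System.out.println(restored.getUuidString());
      }
    }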
*/ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -121,18 +121,18 @@ public final class ScmConfigKeys { public static final int OZONE_SCM_HTTP_BIND_PORT_DEFAULT = 9876; public static final int OZONE_SCM_HTTPS_BIND_PORT_DEFAULT = 9877; - public static final String HDSL_REST_HTTP_ADDRESS_KEY = - "hdsl.rest.http-address"; - public static final String HDSL_REST_HTTP_ADDRESS_DEFAULT = "0.0.0.0:9880"; - public static final String HDSL_REST_CSRF_ENABLED_KEY = - "hdsl.rest.rest-csrf.enabled"; - public static final boolean HDSL_REST_CSRF_ENABLED_DEFAULT = false; - public static final String HDSL_REST_NETTY_HIGH_WATERMARK = - "hdsl.rest.netty.high.watermark"; - public static final int HDSL_REST_NETTY_HIGH_WATERMARK_DEFAULT = 65536; - public static final int HDSL_REST_NETTY_LOW_WATERMARK_DEFAULT = 32768; - public static final String HDSL_REST_NETTY_LOW_WATERMARK = - "hdsl.rest.netty.low.watermark"; + public static final String HDDS_REST_HTTP_ADDRESS_KEY = + "hdds.rest.http-address"; + public static final String HDDS_REST_HTTP_ADDRESS_DEFAULT = "0.0.0.0:9880"; + public static final String HDDS_REST_CSRF_ENABLED_KEY = + "hdds.rest.rest-csrf.enabled"; + public static final boolean HDDS_REST_CSRF_ENABLED_DEFAULT = false; + public static final String HDDS_REST_NETTY_HIGH_WATERMARK = + "hdds.rest.netty.high.watermark"; + public static final int HDDS_REST_NETTY_HIGH_WATERMARK_DEFAULT = 65536; + public static final int HDDS_REST_NETTY_LOW_WATERMARK_DEFAULT = 32768; + public static final String HDDS_REST_NETTY_LOW_WATERMARK = + "hdds.rest.netty.low.watermark"; public static final String OZONE_SCM_HANDLER_COUNT_KEY = "ozone.scm.handler.count.key"; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java similarity index 98% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmInfo.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java index e442fe25d8c..6236febb7b1 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmInfo.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; /** * ScmInfo wraps the result returned from SCM#getScmInfo which diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java similarity index 89% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java index 49817d32b9d..c96f79b2d5f 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java @@ -16,16 +16,16 @@ * limitations under the License. 
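The hdsl.rest.* configuration keys become hdds.rest.*, with unchanged defaults. A short sketch of reading the renamed keys; the constants and default values come from the ScmConfigKeys hunk above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.scm.ScmConfigKeys;

    final class RestConfigSketch {
      private RestConfigSketch() { }

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        String restAddress = conf.get(
            ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_KEY,
            ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_DEFAULT);   // "0.0.0.0:9880"
        boolean csrfEnabled = conf.getBoolean(
            ScmConfigKeys.HDDS_REST_CSRF_ENABLED_KEY,
            ScmConfigKeys.HDDS_REST_CSRF_ENABLED_DEFAULT);   // false
        System.out.println(restAddress + ", csrf=" + csrfEnabled);
      }
    }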
*/ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.Closeable; import java.io.IOException; @@ -125,5 +125,5 @@ public abstract class XceiverClientSpi implements Closeable { * * @return - {Stand_Alone, Ratis or Chained} */ - public abstract HdslProtos.ReplicationType getPipelineType(); + public abstract HddsProtos.ReplicationType getPipelineType(); } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java similarity index 86% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/ScmClient.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java index 79c60290a4e..0d4a2990b6c 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/ScmClient.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java @@ -15,13 +15,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.client; +package org.apache.hadoop.hdds.scm.client; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.IOException; import java.util.EnumSet; @@ -111,8 +111,8 @@ public interface ScmClient { * @return Pipeline * @throws IOException - in case of error. */ - Pipeline createContainer(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor replicationFactor, String containerId, + Pipeline createContainer(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor replicationFactor, String containerId, String owner) throws IOException; /** @@ -123,8 +123,8 @@ public interface ScmClient { * @return A set of nodes that meet the requested criteria. * @throws IOException */ - HdslProtos.NodePool queryNode(EnumSet nodeStatuses, - HdslProtos.QueryScope queryScope, String poolName) throws IOException; + HddsProtos.NodePool queryNode(EnumSet nodeStatuses, + HddsProtos.QueryScope queryScope, String poolName) throws IOException; /** * Creates a specified replication pipeline. @@ -133,7 +133,7 @@ public interface ScmClient { * @param nodePool - Set of machines. 
* @throws IOException */ - Pipeline createReplicationPipeline(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool) + Pipeline createReplicationPipeline(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool) throws IOException; } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java index e85e542eea3..e2f7033d7fa 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.client; +package org.apache.hadoop.hdds.scm.client; /** * This package contains classes for the client of the storage container diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates/ContainerID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java similarity index 97% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates/ContainerID.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java index a51d3b7352a..9520c8c46f7 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates/ContainerID.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java @@ -16,7 +16,7 @@ * */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; +package org.apache.hadoop.hdds.scm.container; import com.google.common.base.Preconditions; import org.apache.commons.math3.util.MathUtils; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/AllocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java similarity index 97% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/AllocatedBlock.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java index f51336faf96..d253b15cd2f 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/AllocatedBlock.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java @@ -16,7 +16,7 @@ * limitations under the License. 
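ScmClient now takes HddsProtos enums for replication type and factor. A sketch of calling the renamed createContainer signature declared above; the container name and owner strings are placeholders:

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.client.ScmClient;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

    final class CreateContainerSketch {
      private CreateContainerSketch() { }

      static Pipeline createRatisContainer(ScmClient scmClient) throws IOException {
        // Signature as declared in the ScmClient hunk above; argument values are placeholders.
        return scmClient.createContainer(
            HddsProtos.ReplicationType.RATIS,
            HddsProtos.ReplicationFactor.THREE,
            "exampleContainer",
            "exampleOwner");
      }
    }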
*/ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; /** * Allocated block wraps the result returned from SCM#allocateBlock which diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/ContainerInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java similarity index 92% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/ContainerInfo.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java index 8e6fdbbdd5b..823a7fbc05a 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/ContainerInfo.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java @@ -16,13 +16,13 @@ * limitations under the License. */ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; import com.google.common.base.Preconditions; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerID; +import org.apache.hadoop.hdds.scm.container.ContainerID; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.util.Time; import java.util.Comparator; @@ -32,7 +32,7 @@ import java.util.Comparator; */ public class ContainerInfo implements Comparator, Comparable { - private HdslProtos.LifeCycleState state; + private HddsProtos.LifeCycleState state; private Pipeline pipeline; // Bytes allocated by SCM for clients. private long allocatedBytes; @@ -48,7 +48,7 @@ public class ContainerInfo ContainerInfo( long containerID, final String containerName, - HdslProtos.LifeCycleState state, + HddsProtos.LifeCycleState state, Pipeline pipeline, long allocatedBytes, long usedBytes, @@ -73,7 +73,7 @@ public class ContainerInfo public ContainerInfo() { } - public static ContainerInfo fromProtobuf(HdslProtos.SCMContainerInfo info) { + public static ContainerInfo fromProtobuf(HddsProtos.SCMContainerInfo info) { ContainerInfo.Builder builder = new ContainerInfo.Builder(); builder.setPipeline(Pipeline.getFromProtoBuf(info.getPipeline())); builder.setAllocatedBytes(info.getAllocatedBytes()); @@ -95,11 +95,11 @@ public class ContainerInfo return containerName; } - public HdslProtos.LifeCycleState getState() { + public HddsProtos.LifeCycleState getState() { return state; } - public void setState(HdslProtos.LifeCycleState state) { + public void setState(HddsProtos.LifeCycleState state) { this.state = state; } @@ -156,9 +156,9 @@ public class ContainerInfo allocatedBytes += size; } - public HdslProtos.SCMContainerInfo getProtobuf() { - HdslProtos.SCMContainerInfo.Builder builder = - HdslProtos.SCMContainerInfo.newBuilder(); + public HddsProtos.SCMContainerInfo getProtobuf() { + HddsProtos.SCMContainerInfo.Builder builder = + HddsProtos.SCMContainerInfo.newBuilder(); builder.setPipeline(getPipeline().getProtobufMessage()); builder.setAllocatedBytes(getAllocatedBytes()); builder.setUsedBytes(getUsedBytes()); @@ -268,7 +268,7 @@ public class ContainerInfo * Builder class for ContainerInfo. 
*/ public static class Builder { - private HdslProtos.LifeCycleState state; + private HddsProtos.LifeCycleState state; private Pipeline pipeline; private long allocated; private long used; @@ -284,7 +284,7 @@ public class ContainerInfo return this; } - public Builder setState(HdslProtos.LifeCycleState lifeCycleState) { + public Builder setState(HddsProtos.LifeCycleState lifeCycleState) { this.state = lifeCycleState; return this; } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/DeleteBlockResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java similarity index 92% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/DeleteBlockResult.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java index dceaa6cd7b5..fd97eae3b70 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/DeleteBlockResult.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java @@ -15,10 +15,9 @@ * the License. */ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; - -import static org.apache.hadoop.hdsl.protocol.proto +import static org.apache.hadoop.hdds.protocol.proto .ScmBlockLocationProtocolProtos.DeleteScmBlockResult; /** diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java similarity index 92% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java index 9f2d1f4ef9c..32d0a2d85aa 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonFilter; @@ -29,8 +29,8 @@ import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.IOException; import java.util.ArrayList; @@ -83,14 +83,14 @@ public class Pipeline { * @param pipeline - ProtoBuf definition for the pipeline. 
* @return Pipeline Object */ - public static Pipeline getFromProtoBuf(HdslProtos.Pipeline pipeline) { + public static Pipeline getFromProtoBuf(HddsProtos.Pipeline pipeline) { Preconditions.checkNotNull(pipeline); PipelineChannel pipelineChannel = PipelineChannel.getFromProtoBuf(pipeline.getPipelineChannel()); return new Pipeline(pipeline.getContainerName(), pipelineChannel); } - public HdslProtos.ReplicationFactor getFactor() { + public HddsProtos.ReplicationFactor getFactor() { return pipelineChannel.getFactor(); } @@ -143,9 +143,9 @@ public class Pipeline { * @return Protobuf message */ @JsonIgnore - public HdslProtos.Pipeline getProtobufMessage() { - HdslProtos.Pipeline.Builder builder = - HdslProtos.Pipeline.newBuilder(); + public HddsProtos.Pipeline getProtobufMessage() { + HddsProtos.Pipeline.Builder builder = + HddsProtos.Pipeline.newBuilder(); builder.setContainerName(this.containerName); builder.setPipelineChannel(this.pipelineChannel.getProtobufMessage()); return builder.build(); @@ -194,7 +194,7 @@ public class Pipeline { * * @return - LifeCycleStates. */ - public HdslProtos.LifeCycleState getLifeCycleState() { + public HddsProtos.LifeCycleState getLifeCycleState() { return pipelineChannel.getLifeCycleState(); } @@ -212,7 +212,7 @@ public class Pipeline { * * @return type - Standalone, Ratis, Chained. */ - public HdslProtos.ReplicationType getType() { + public HddsProtos.ReplicationType getType() { return pipelineChannel.getType(); } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java similarity index 83% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java index 1937968b66a..ebd52e99844 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java @@ -15,15 +15,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import java.util.Map; import java.util.TreeMap; @@ -82,9 +82,9 @@ public class PipelineChannel { } @JsonIgnore - public HdslProtos.PipelineChannel getProtobufMessage() { - HdslProtos.PipelineChannel.Builder builder = - HdslProtos.PipelineChannel.newBuilder(); + public HddsProtos.PipelineChannel getProtobufMessage() { + HddsProtos.PipelineChannel.Builder builder = + HddsProtos.PipelineChannel.newBuilder(); for (DatanodeDetails datanode : datanodes.values()) { builder.addMembers(datanode.getProtoBufMessage()); } @@ -104,7 +104,7 @@ public class PipelineChannel { } public static PipelineChannel getFromProtoBuf( - HdslProtos.PipelineChannel transportProtos) { + HddsProtos.PipelineChannel transportProtos) { Preconditions.checkNotNull(transportProtos); PipelineChannel pipelineChannel = new PipelineChannel(transportProtos.getLeaderID(), @@ -113,7 +113,7 @@ public class PipelineChannel { transportProtos.getFactor(), transportProtos.getName()); - for (HdslProtos.DatanodeDetailsProto dataID : + for (HddsProtos.DatanodeDetailsProto dataID : transportProtos.getMembersList()) { pipelineChannel.addMember(DatanodeDetails.getFromProtoBuf(dataID)); } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/StorageContainerException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java similarity index 96% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/StorageContainerException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java index e285aec32c9..35d8444b9dc 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/StorageContainerException.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java @@ -15,9 +15,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import java.io.IOException; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java similarity index 93% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java index 3fa966379b5..ffe0d3d4d99 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.container.common.helpers; +package org.apache.hadoop.hdds.scm.container.common.helpers; /** Contains protocol buffer helper classes and utilites used in impl. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java similarity index 96% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java index ad24f9877af..3c544db3ab9 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; /** * This package contains classes for the client of the storage container diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/LocatedContainer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java similarity index 98% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/LocatedContainer.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java index 469dab5188f..14ee3d2f80b 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/LocatedContainer.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.scm.protocol; +package org.apache.hadoop.hdds.scm.protocol; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmBlockLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java similarity index 89% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmBlockLocationProtocol.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java index 0bb84acc053..f100fc702ce 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmBlockLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java @@ -15,19 +15,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.protocol; +package org.apache.hadoop.hdds.scm.protocol; + +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.ozone.common.BlockGroup; +import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; import java.io.IOException; import java.util.List; import java.util.Set; -import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; -import org.apache.hadoop.ozone.common.BlockGroup; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.scm.ScmInfo; - /** * ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes * to read/write a block. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmLocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java similarity index 98% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmLocatedBlock.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java index 4e4b3d669ea..6cbdee42387 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmLocatedBlock.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java @@ -16,14 +16,14 @@ * limitations under the License. */ -package org.apache.hadoop.scm.protocol; - -import java.util.List; -import java.util.stream.Collectors; +package org.apache.hadoop.hdds.scm.protocol; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; +import java.util.List; +import java.util.stream.Collectors; + /** * Holds the nodes that currently host the block for a block key. 
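
ScmBlockLocationProtocol likewise changes only its package and imports. A hedged sketch of a caller written against the renamed interface; the allocateBlock signature matches the client-side translator further down in this patch, and the specific enum constants used are assumptions, not shown in this hunk:

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
    import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;

    /** Illustrative helper, not part of this patch. */
    public final class BlockAllocationExample {
      private BlockAllocationExample() {
      }

      /**
       * Asks SCM for a block using the renamed enum types; the concrete
       * constants (RATIS, THREE) are assumptions, not shown in this hunk.
       */
      public static AllocatedBlock allocate(ScmBlockLocationProtocol scm,
          long size, String owner) throws IOException {
        return scm.allocateBlock(size, HddsProtos.ReplicationType.RATIS,
            HddsProtos.ReplicationFactor.THREE, owner);
      }
    }
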
*/ diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java similarity index 82% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/StorageContainerLocationProtocol.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java index 01fc07552be..a60fbb2f222 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/StorageContainerLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java @@ -15,18 +15,19 @@ * the License. */ -package org.apache.hadoop.scm.protocol; +package org.apache.hadoop.hdds.scm.protocol; + +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; import java.io.IOException; import java.util.EnumSet; import java.util.List; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; - /** * ContainerLocationProtocol is used by an HDFS node to find the set of nodes * that currently host a container. @@ -37,8 +38,8 @@ public interface StorageContainerLocationProtocol { * set of datanodes that should be used creating this container. * */ - Pipeline allocateContainer(HdslProtos.ReplicationType replicationType, - HdslProtos.ReplicationFactor factor, String containerName, String owner) + Pipeline allocateContainer(HddsProtos.ReplicationType replicationType, + HddsProtos.ReplicationFactor factor, String containerName, String owner) throws IOException; /** @@ -85,8 +86,8 @@ public interface StorageContainerLocationProtocol { * @param nodeStatuses * @return List of Datanodes. */ - HdslProtos.NodePool queryNode(EnumSet nodeStatuses, - HdslProtos.QueryScope queryScope, String poolName) throws IOException; + HddsProtos.NodePool queryNode(EnumSet nodeStatuses, + HddsProtos.QueryScope queryScope, String poolName) throws IOException; /** * Notify from client when begin or finish creating objects like pipeline @@ -109,8 +110,8 @@ public interface StorageContainerLocationProtocol { * @param nodePool - optional machine list to build a pipeline. 
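
StorageContainerLocationProtocol gets the same treatment, with only the HddsProtos parameter types renamed. A sketch of a caller against the allocateContainer signature shown in this hunk (the enum constants and the wrapper class are assumptions for illustration):

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
    import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;

    /** Illustrative helper, not part of this patch. */
    public final class ContainerAllocationExample {
      private ContainerAllocationExample() {
      }

      /**
       * Allocates a container through the renamed interface; the constants
       * (STAND_ALONE, ONE) are assumptions, not shown in this hunk.
       */
      public static Pipeline allocate(StorageContainerLocationProtocol scm,
          String containerName, String owner) throws IOException {
        return scm.allocateContainer(HddsProtos.ReplicationType.STAND_ALONE,
            HddsProtos.ReplicationFactor.ONE, containerName, owner);
      }
    }
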
* @throws IOException */ - Pipeline createReplicationPipeline(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool) + Pipeline createReplicationPipeline(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool) throws IOException; /** diff --git a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java similarity index 94% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java index d1f97755bc8..b56a749453f 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java @@ -16,4 +16,4 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; \ No newline at end of file +package org.apache.hadoop.hdds.scm.protocol; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java similarity index 82% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java index 0de759ff233..0012f3e4a8a 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java @@ -14,31 +14,39 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.scm.protocolPB; +package org.apache.hadoop.hdds.scm.protocolPB; import com.google.common.base.Preconditions; import com.google.common.collect.Sets; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .AllocateScmBlockRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .AllocateScmBlockResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmKeyBlocksRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmKeyBlocksResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .GetScmBlockLocationsRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .GetScmBlockLocationsResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .KeyBlocks; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .ScmLocatedBlockProto; import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; import org.apache.hadoop.ozone.common.BlockGroup; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.AllocateScmBlockRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.AllocateScmBlockResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.GetScmBlockLocationsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.GetScmBlockLocationsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.ScmLocatedBlockProto; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.KeyBlocks; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; import java.io.Closeable; import java.io.IOException; @@ -117,7 +125,7 @@ public final class ScmBlockLocationProtocolClientSideTranslatorPB */ @Override public AllocatedBlock allocateBlock(long size, - HdslProtos.ReplicationType type, HdslProtos.ReplicationFactor factor, + HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor, String owner) throws IOException { Preconditions.checkArgument(size > 0, "block size must be greater than 0"); @@ -181,9 +189,9 @@ public final class ScmBlockLocationProtocolClientSideTranslatorPB */ @Override public ScmInfo getScmInfo() throws 
IOException { - HdslProtos.GetScmInfoRequestProto request = - HdslProtos.GetScmInfoRequestProto.getDefaultInstance(); - HdslProtos.GetScmInfoRespsonseProto resp; + HddsProtos.GetScmInfoRequestProto request = + HddsProtos.GetScmInfoRequestProto.getDefaultInstance(); + HddsProtos.GetScmInfoRespsonseProto resp; try { resp = rpcProxy.getScmInfo(NULL_RPC_CONTROLLER, request); } catch (ServiceException e) { diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java similarity index 92% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java index 019aeeb1921..837c95b2aa0 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java @@ -15,12 +15,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.protocolPB; +package org.apache.hadoop.hdds.scm.protocolPB; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.ipc.ProtocolInfo; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos .ScmBlockLocationProtocolService; +import org.apache.hadoop.ipc.ProtocolInfo; /** * Protocol used from an HDFS node to StorageContainerManager. This extends the diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java similarity index 80% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java index 348b266e5c1..3638f63e65d 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java @@ -14,33 +14,45 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.scm.protocolPB; +package org.apache.hadoop.hdds.scm.protocolPB; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.GetContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.GetContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.NodeQueryRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.NodeQueryResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.PipelineRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.PipelineResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMListContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMListContainerResponseProto; import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.NodeQueryRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.NodeQueryResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import java.io.Closeable; import java.io.IOException; @@ -85,8 +97,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB * @throws IOException */ @Override - public Pipeline allocateContainer(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, String + public Pipeline allocateContainer(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, String containerName, String owner) throws IOException { Preconditions.checkNotNull(containerName, "Container Name cannot be Null"); @@ -151,7 +163,7 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB SCMListContainerResponseProto response = rpcProxy.listContainer(NULL_RPC_CONTROLLER, request); List containerList = new ArrayList<>(); - for (HdslProtos.SCMContainerInfo containerInfoProto : response + for (HddsProtos.SCMContainerInfo containerInfoProto : response .getContainersList()) { containerList.add(ContainerInfo.fromProtobuf(containerInfoProto)); } @@ -191,8 +203,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB * @return List of Datanodes. */ @Override - public HdslProtos.NodePool queryNode(EnumSet - nodeStatuses, HdslProtos.QueryScope queryScope, String poolName) + public HddsProtos.NodePool queryNode(EnumSet + nodeStatuses, HddsProtos.QueryScope queryScope, String poolName) throws IOException { // TODO : We support only cluster wide query right now. So ignoring checking // queryScope and poolName @@ -248,8 +260,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB * @throws IOException */ @Override - public Pipeline createReplicationPipeline(HdslProtos.ReplicationType - replicationType, HdslProtos.ReplicationFactor factor, HdslProtos + public Pipeline createReplicationPipeline(HddsProtos.ReplicationType + replicationType, HddsProtos.ReplicationFactor factor, HddsProtos .NodePool nodePool) throws IOException { PipelineRequestProto request = PipelineRequestProto.newBuilder() .setNodePool(nodePool) @@ -277,10 +289,10 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB @Override public ScmInfo getScmInfo() throws IOException { - HdslProtos.GetScmInfoRequestProto request = - HdslProtos.GetScmInfoRequestProto.getDefaultInstance(); + HddsProtos.GetScmInfoRequestProto request = + HddsProtos.GetScmInfoRequestProto.getDefaultInstance(); try { - HdslProtos.GetScmInfoRespsonseProto resp = rpcProxy.getScmInfo( + HddsProtos.GetScmInfoRespsonseProto resp = rpcProxy.getScmInfo( NULL_RPC_CONTROLLER, request); ScmInfo.Builder builder = new ScmInfo.Builder() .setClusterId(resp.getClusterId()) diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java similarity index 88% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java index b8c2958afe4..f234ad31298 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolPB.java +++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java @@ -15,11 +15,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm.protocolPB; +package org.apache.hadoop.hdds.scm.protocolPB; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos + .StorageContainerLocationProtocolService; import org.apache.hadoop.ipc.ProtocolInfo; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.StorageContainerLocationProtocolService; /** * Protocol used from an HDFS node to StorageContainerManager. This extends the diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java index f9a2c090f25..652ae60973c 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.protocolPB; +package org.apache.hadoop.hdds.scm.protocolPB; /** * This package contains classes for the client of the storage container diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java similarity index 91% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index 174f1c17e9f..1559816bc4d 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -16,44 +16,43 @@ * limitations under the License. 
*/ -package org.apache.hadoop.scm.storage; +package org.apache.hadoop.hdds.scm.storage; import com.google.protobuf.ByteString; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.scm.XceiverClientSpi; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .GetKeyRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .GetKeyResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .GetSmallFileRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .GetSmallFileResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.KeyData; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .PutKeyRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .PutSmallFileRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ReadChunkRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ReadChunkResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Type; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .WriteChunkRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .ReadContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ReadContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ReadContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .WriteChunkRequestProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue; import java.io.IOException; -import org.apache.hadoop.scm.XceiverClientSpi; /** * Implementation of all container protocol calls performed by Container diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/package-info.java rename to 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java index aa89af0695f..8e981586bd6 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.scm.storage; +package org.apache.hadoop.hdds.scm.storage; /** * This package contains StorageContainerManager classes. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java similarity index 98% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java index bad3a84c939..ef96f379398 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java @@ -20,10 +20,9 @@ package org.apache.hadoop.ozone; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.ozone.client.ReplicationFactor; -import org.apache.hadoop.ozone.client.ReplicationType; - -import org.apache.hadoop.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.client.ReplicationFactor; +import org.apache.hadoop.hdds.client.ReplicationType; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; /** * This class contains constants for configuration keys used in Ozone. 
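
OzoneConfigKeys now takes ReplicationFactor and ReplicationType from org.apache.hadoop.hdds.client and its SCM keys from org.apache.hadoop.hdds.scm.ScmConfigKeys. A minimal sketch importing the moved classes; the helper assumes only that the two types exist at their new locations and is not part of the patch:

    import org.apache.hadoop.hdds.client.ReplicationFactor;
    import org.apache.hadoop.hdds.client.ReplicationType;

    /** Illustrative helper, not part of this patch. */
    public final class ClientReplicationExample {
      private ClientReplicationExample() {
      }

      /** Formats the requested replication for logging or error messages. */
      public static String describe(ReplicationType type, ReplicationFactor factor) {
        return type + "/" + factor;
      }
    }
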
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java similarity index 96% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java index 9fcc61334e6..38ce6ccb0a6 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java @@ -17,8 +17,8 @@ package org.apache.hadoop.ozone.common; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.KeyBlocks; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .KeyBlocks; import java.util.List; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java similarity index 91% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java index da563858440..ec54ac54076 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java @@ -17,9 +17,11 @@ */ package org.apache.hadoop.ozone.common; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmBlockResult; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmBlockResult.Result; -import org.apache.hadoop.scm.container.common.helpers.DeleteBlockResult; +import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmBlockResult; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmBlockResult.Result; import java.util.ArrayList; import java.util.List; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java index c3f9234fa9c..518b5194781 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.ozone.common; -import java.io.File; -import java.io.IOException; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import java.io.File; +import 
java.io.IOException; + /** * The exception is thrown when file system state is inconsistent * and is not recoverable. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java similarity index 99% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java index 9df2ffaafbd..fb30d921b8d 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java @@ -17,6 +17,13 @@ */ package org.apache.hadoop.ozone.common; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType; +import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.IOException; import java.nio.file.DirectoryStream; @@ -24,14 +31,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Properties; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType; -import org.apache.hadoop.util.Time; - /** * Storage information file. This Class defines the methods to check * the consistency of the storage dir and the version file. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java similarity index 98% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java index a79980ab034..0e98a4c8a9f 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java @@ -17,18 +17,17 @@ */ package org.apache.hadoop.ozone.common; -import java.io.IOException; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.File; -import java.io.RandomAccessFile; - -import java.util.Properties; -import java.util.UUID; - import com.google.common.base.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.util.Properties; +import java.util.UUID; /** * Common class for storage information. 
This class defines the common diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java index 4aa36c38ad7..aa1fe74b2a0 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java @@ -19,8 +19,8 @@ package org.apache.hadoop.ozone.container.common.helpers; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.IOException; import java.util.Map; @@ -111,8 +111,8 @@ public class ChunkInfo { } for (Map.Entry entry : metadata.entrySet()) { - HdslProtos.KeyValue.Builder keyValBuilder = - HdslProtos.KeyValue.newBuilder(); + HddsProtos.KeyValue.Builder keyValBuilder = + HddsProtos.KeyValue.newBuilder(); builder.addMetadata(keyValBuilder.setKey(entry.getKey()) .setValue(entry.getValue()).build()); } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java similarity index 95% rename from 
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java index eb021525b8b..be546c75c3a 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.ozone.container.common.helpers; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.IOException; import java.util.Collections; @@ -82,8 +82,8 @@ public class KeyData { builder.setName(this.getKeyName()); builder.addAllChunks(this.chunks); for (Map.Entry entry : metadata.entrySet()) { - HdslProtos.KeyValue.Builder keyValBuilder = - HdslProtos.KeyValue.newBuilder(); + HddsProtos.KeyValue.Builder keyValBuilder = + HddsProtos.KeyValue.newBuilder(); builder.addMetadata(keyValBuilder.setKey(entry.getKey()) .setValue(entry.getValue()).build()); } diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java similarity index 100% rename from 
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java similarity index 76% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java index c211bd5b9be..fa793419bf0 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java @@ -21,30 +21,31 @@ import com.google.common.collect.Sets; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import 
org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .AllocateScmBlockRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .AllocateScmBlockResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteKeyBlocksResultProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmKeyBlocksRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .DeleteScmKeyBlocksResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .GetScmBlockLocationsRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .GetScmBlockLocationsResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos + .ScmLocatedBlockProto; import org.apache.hadoop.ozone.common.BlockGroup; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteKeyBlocksResultProto; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; -import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol; -import org.apache.hadoop.scm.protocolPB.ScmBlockLocationProtocolPB; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.AllocateScmBlockRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.AllocateScmBlockResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.GetScmBlockLocationsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.GetScmBlockLocationsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.ScmLocatedBlockProto; import java.io.IOException; import java.util.List; @@ -152,8 +153,8 @@ public final class ScmBlockLocationProtocolServerSideTranslatorPB } @Override - public HdslProtos.GetScmInfoRespsonseProto getScmInfo( - RpcController controller, HdslProtos.GetScmInfoRequestProto req) + public HddsProtos.GetScmInfoRespsonseProto getScmInfo( + RpcController controller, HddsProtos.GetScmInfoRequestProto req) throws ServiceException { ScmInfo scmInfo; try { @@ -161,7 +162,7 @@ public final class ScmBlockLocationProtocolServerSideTranslatorPB } catch (IOException ex) { throw new ServiceException(ex); } - return HdslProtos.GetScmInfoRespsonseProto.newBuilder() + return HddsProtos.GetScmInfoRespsonseProto.newBuilder() .setClusterId(scmInfo.getClusterId()) .setScmId(scmInfo.getScmId()) .build(); diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java similarity index 73% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java index be19c68eb51..4974268bcb7 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java @@ -18,36 +18,46 @@ */ package org.apache.hadoop.ozone.protocolPB; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.GetContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.GetContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.PipelineRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.PipelineResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMDeleteContainerResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMListContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.SCMListContainerResponseProto; + import java.io.IOException; import java.util.EnumSet; import java.util.List; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerLocationProtocolProtos; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol; - -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMDeleteContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineRequestProto; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; - /** * This class is the server-side translator that forwards requests received on * {@link StorageContainerLocationProtocolPB} to the @@ -150,9 +160,9 @@ public final class StorageContainerLocationProtocolServerSideTranslatorPB StorageContainerLocationProtocolProtos.NodeQueryRequestProto request) throws ServiceException { try { - EnumSet nodeStateEnumSet = EnumSet.copyOf(request + EnumSet nodeStateEnumSet = EnumSet.copyOf(request .getQueryList()); - HdslProtos.NodePool datanodes = impl.queryNode(nodeStateEnumSet, + HddsProtos.NodePool datanodes = impl.queryNode(nodeStateEnumSet, request.getScope(), request.getPoolName()); return StorageContainerLocationProtocolProtos .NodeQueryResponseProto.newBuilder() @@ -185,12 +195,12 @@ public final class StorageContainerLocationProtocolServerSideTranslatorPB } @Override - public HdslProtos.GetScmInfoRespsonseProto getScmInfo( - RpcController controller, HdslProtos.GetScmInfoRequestProto req) + public HddsProtos.GetScmInfoRespsonseProto getScmInfo( + RpcController controller, HddsProtos.GetScmInfoRequestProto req) throws ServiceException { try { ScmInfo scmInfo = impl.getScmInfo(); - return HdslProtos.GetScmInfoRespsonseProto.newBuilder() + return HddsProtos.GetScmInfoRespsonseProto.newBuilder() .setClusterId(scmInfo.getClusterId()) .setScmId(scmInfo.getScmId()) .build(); diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java index 909873f1d16..af56da394cd 100644 --- 
a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java @@ -18,14 +18,14 @@ package org.apache.hadoop.ozone.web.utils; -import java.io.IOException; -import java.util.List; - import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.type.CollectionType; +import java.io.IOException; +import java.util.List; + /** * JSON Utility functions used in ozone. */ diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java index 2ff4e5562f3..431da640945 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java @@ -24,16 +24,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +import java.util.concurrent.CompletionService; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; -import java.util.concurrent.Executors; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.Future; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; /** * An abstract class for a background service in ozone. 
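The BackgroundService hunk above only regroups the java.util.concurrent imports; the abstract contract is untouched by the rename. For orientation, a subclass is wired up roughly as in this sketch — the (name, interval, unit, pool size, timeout) constructor and the getTasks() hook are assumptions taken from context, not shown in this hunk:

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.utils.BackgroundService;
    import org.apache.hadoop.utils.BackgroundTaskQueue;

    /** Hypothetical no-op service; illustrates the subclassing shape only. */
    public class NoOpBackgroundService extends BackgroundService {
      public NoOpBackgroundService() {
        // service name, poll interval, time unit, thread pool size, task timeout
        super("NoOpBackgroundService", 1000L, TimeUnit.MILLISECONDS, 1, 5000L);
      }

      @Override
      public BackgroundTaskQueue getTasks() {
        return new BackgroundTaskQueue(); // nothing queued: the service idles each cycle
      }
    }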
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java index 72ac8d16f77..83ca83d80d5 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java @@ -21,21 +21,21 @@ package org.apache.hadoop.utils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter; import org.fusesource.leveldbjni.JniDBFactory; -import org.iq80.leveldb.WriteBatch; import org.iq80.leveldb.DB; -import org.iq80.leveldb.Options; -import org.iq80.leveldb.WriteOptions; import org.iq80.leveldb.DBIterator; -import org.iq80.leveldb.Snapshot; +import org.iq80.leveldb.Options; import org.iq80.leveldb.ReadOptions; +import org.iq80.leveldb.Snapshot; +import org.iq80.leveldb.WriteBatch; +import org.iq80.leveldb.WriteOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; -import java.util.List; import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Map.Entry; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java similarity index 100% rename from 
hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java similarity index 95% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java index 095e7187783..9e9c32ae561 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java @@ -20,23 +20,24 @@ package org.apache.hadoop.utils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.OzoneConfigKeys; +import org.iq80.leveldb.Options; +import org.rocksdb.BlockBasedTableConfig; +import org.rocksdb.Statistics; +import org.rocksdb.StatsLevel; + +import java.io.File; +import java.io.IOException; + +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_METADATA_STORE_IMPL_LEVELDB; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_METADATA_STORE_IMPL_ROCKSDB; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_METADATA_STORE_ROCKSDB_STATISTICS; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF; -import org.iq80.leveldb.Options; -import org.rocksdb.BlockBasedTableConfig; - -import java.io.File; -import java.io.IOException; - -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB; - -import org.rocksdb.Statistics; -import org.rocksdb.StatsLevel; /** * Builder for metadata store. 
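MetadataStoreBuilder likewise only has its imports and static imports regrouped. As a usage sketch, the builder is typically driven as below; newBuilder, setConf, setDbFile, setCreateIfMissing and build are assumed method names for illustration and are not confirmed by this hunk:

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.utils.MetadataStore;
    import org.apache.hadoop.utils.MetadataStoreBuilder;

    public final class MetadataStoreExample {
      private MetadataStoreExample() { }

      static MetadataStore open(File dbFile) throws IOException {
        Configuration conf = new OzoneConfiguration();
        // Whether a LevelDB or RocksDB store comes back is decided by the
        // OZONE_METADATA_STORE_IMPL_* settings referenced in the imports above.
        return MetadataStoreBuilder.newBuilder()
            .setConf(conf)
            .setDbFile(dbFile)
            .setCreateIfMissing(true)
            .build();
      }
    }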
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java index 2f340a534b9..a60e98d9ab2 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java @@ -23,25 +23,25 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.ratis.shaded.com.google.common.annotations.VisibleForTesting; -import org.rocksdb.RocksIterator; +import org.rocksdb.DbPath; import org.rocksdb.Options; -import org.rocksdb.WriteOptions; import org.rocksdb.RocksDB; import org.rocksdb.RocksDBException; +import org.rocksdb.RocksIterator; import org.rocksdb.WriteBatch; -import org.rocksdb.DbPath; +import org.rocksdb.WriteOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.ObjectName; import java.io.File; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.List; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; -import java.util.AbstractMap; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * RocksDB implementation of ozone metadata store. diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java similarity index 92% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java index 8eb0e4b9cc8..88c093e62bf 100644 --- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java @@ -23,7 +23,15 @@ import org.rocksdb.HistogramType; import org.rocksdb.Statistics; import org.rocksdb.TickerType; -import javax.management.*; +import javax.management.Attribute; +import javax.management.AttributeList; +import javax.management.AttributeNotFoundException; +import javax.management.DynamicMBean; +import javax.management.InvalidAttributeValueException; +import javax.management.MBeanAttributeInfo; +import javax.management.MBeanException; +import javax.management.MBeanInfo; +import javax.management.ReflectionException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashSet; diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/hadoop/utils/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java similarity index 97% rename from hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java rename to hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java index 89a1cb54f77..3a55831364f 100644 --- 
a/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java +++ b/hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java @@ -18,9 +18,9 @@ package org.apache.ratis; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.ratis.client.RaftClient; import org.apache.ratis.conf.RaftProperties; import org.apache.ratis.grpc.GrpcConfigKeys; @@ -34,11 +34,11 @@ import org.apache.ratis.util.SizeInBytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.List; -import java.util.Collections; -import java.util.Collection; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.stream.Collectors; /** diff --git a/hadoop-hdsl/common/src/main/java/org/apache/ratis/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/ratis/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/ratis/package-info.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java rename to hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java diff --git a/hadoop-hdsl/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java rename to hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java diff --git a/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto similarity index 99% rename from hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto rename to hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto index 3060ada59c1..a6270eff506 100644 --- a/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto +++ b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto @@ -24,12 +24,12 @@ // This file contains protocol buffers that are used to transfer data // to and from the datanode. -option java_package = "org.apache.hadoop.hdsl.protocol.proto"; +option java_package = "org.apache.hadoop.hdds.protocol.proto"; option java_outer_classname = "ContainerProtos"; option java_generate_equals_and_hash = true; -package hadoop.hdsl; +package hadoop.hdds; import "hdfs.proto"; -import "hdsl.proto"; +import "hdds.proto"; /** * Commands that are used to manipulate the state of containers on a datanode. 
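The java_package and java_outer_classname switches above mean the generated classes keep their names but move packages, so callers of the container protocol change only an import line; ContainerCommandRequestProto is used below purely as a representative generated type:

    // before: import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
    import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;

    public class ContainerProtoImportExample {
      // The nested generated message types are otherwise unchanged by the rename.
      private ContainerProtos.ContainerCommandRequestProto request;
    }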
diff --git a/hadoop-hdsl/common/src/main/proto/ScmBlockLocationProtocol.proto b/hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto similarity index 92% rename from hadoop-hdsl/common/src/main/proto/ScmBlockLocationProtocol.proto rename to hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto index f6805d9a73e..38d2e16ce8b 100644 --- a/hadoop-hdsl/common/src/main/proto/ScmBlockLocationProtocol.proto +++ b/hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto @@ -22,14 +22,14 @@ * for what changes are allowed for a *unstable* .proto interface. */ -option java_package = "org.apache.hadoop.hdsl.protocol.proto"; +option java_package = "org.apache.hadoop.hdds.protocol.proto"; option java_outer_classname = "ScmBlockLocationProtocolProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; -package hadoop.hdsl; +package hadoop.hdds; import "hdfs.proto"; -import "hdsl.proto"; +import "hdds.proto"; // SCM Block protocol @@ -53,7 +53,7 @@ message GetScmBlockLocationsResponseProto { */ message ScmLocatedBlockProto { required string key = 1; - required hadoop.hdsl.Pipeline pipeline = 2; + required hadoop.hdds.Pipeline pipeline = 2; } /** @@ -62,7 +62,7 @@ message ScmLocatedBlockProto { message AllocateScmBlockRequestProto { required uint64 size = 1; required ReplicationType type = 2; - required hadoop.hdsl.ReplicationFactor factor = 3; + required hadoop.hdds.ReplicationFactor factor = 3; required string owner = 4; } @@ -127,7 +127,7 @@ message AllocateScmBlockResponseProto { } required Error errorCode = 1; required string key = 2; - required hadoop.hdsl.Pipeline pipeline = 3; + required hadoop.hdds.Pipeline pipeline = 3; required bool createContainer = 4; optional string errorMessage = 5; } @@ -161,6 +161,6 @@ service ScmBlockLocationProtocolService { /** * Gets the scmInfo from SCM. */ - rpc getScmInfo(hadoop.hdsl.GetScmInfoRequestProto) - returns (hadoop.hdsl.GetScmInfoRespsonseProto); + rpc getScmInfo(hadoop.hdds.GetScmInfoRequestProto) + returns (hadoop.hdds.GetScmInfoRespsonseProto); } diff --git a/hadoop-hdsl/common/src/main/proto/StorageContainerLocationProtocol.proto b/hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto similarity index 98% rename from hadoop-hdsl/common/src/main/proto/StorageContainerLocationProtocol.proto rename to hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto index 795e37c6e54..d7540a3fe4f 100644 --- a/hadoop-hdsl/common/src/main/proto/StorageContainerLocationProtocol.proto +++ b/hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto @@ -22,14 +22,14 @@ * for what changes are allowed for a *unstable* .proto interface. */ -option java_package = "org.apache.hadoop.hdsl.protocol.proto"; +option java_package = "org.apache.hadoop.hdds.protocol.proto"; option java_outer_classname = "StorageContainerLocationProtocolProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; -package hadoop.hdsl; +package hadoop.hdds; import "hdfs.proto"; -import "hdsl.proto"; +import "hdds.proto"; /** * Request send to SCM asking where the container should be created. 
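With the stubs regenerated under hadoop.hdds, server-side handlers build responses exactly as in the translator hunks earlier in this patch; a condensed sketch (GetScmInfoRespsonseProto keeps its pre-existing spelling, which this patch deliberately does not touch):

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.ScmInfo;

    public final class GetScmInfoExample {
      private GetScmInfoExample() { }

      static HddsProtos.GetScmInfoRespsonseProto toProto(ScmInfo scmInfo) {
        // Mirrors the ScmBlockLocation translator: copy the cluster and SCM ids.
        return HddsProtos.GetScmInfoRespsonseProto.newBuilder()
            .setClusterId(scmInfo.getClusterId())
            .setScmId(scmInfo.getScmId())
            .build();
      }
    }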
diff --git a/hadoop-hdsl/common/src/main/proto/hdsl.proto b/hadoop-hdds/common/src/main/proto/hdds.proto similarity index 97% rename from hadoop-hdsl/common/src/main/proto/hdsl.proto rename to hadoop-hdds/common/src/main/proto/hdds.proto index a4baa9797b8..f7b2f72b483 100644 --- a/hadoop-hdsl/common/src/main/proto/hdsl.proto +++ b/hadoop-hdds/common/src/main/proto/hdds.proto @@ -22,11 +22,11 @@ * for what changes are allowed for a *unstable* .proto interface. */ -option java_package = "org.apache.hadoop.hdsl.protocol.proto"; -option java_outer_classname = "HdslProtos"; +option java_package = "org.apache.hadoop.hdds.protocol.proto"; +option java_outer_classname = "HddsProtos"; option java_generic_services = true; option java_generate_equals_and_hash = true; -package hadoop.hdsl; +package hadoop.hdds; message DatanodeDetailsProto { // TODO: make the port as a seperate proto message and use it here diff --git a/hadoop-hdsl/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml similarity index 99% rename from hadoop-hdsl/common/src/main/resources/ozone-default.xml rename to hadoop-hdds/common/src/main/resources/ozone-default.xml index 9feadcf7b08..8018d294d1b 100644 --- a/hadoop-hdsl/common/src/main/resources/ozone-default.xml +++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml @@ -535,7 +535,7 @@ ozone.scm.container.placement.impl - org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementRandom + org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementRandom OZONE, MANAGEMENT Placement policy class for containers. @@ -995,7 +995,7 @@ - hdsl.rest.rest-csrf.enabled + hdds.rest.rest-csrf.enabled false If true, then enables Object Store REST server protection against @@ -1004,7 +1004,7 @@ - hdsl.rest.http-address + hdds.rest.http-address 0.0.0.0:9880 The http address of Object Store REST server inside the datanode. @@ -1012,7 +1012,7 @@ - hdsl.rest.netty.high.watermark + hdds.rest.netty.high.watermark 65535 High watermark configuration to Netty for Object Store REST server. @@ -1020,7 +1020,7 @@ - hdsl.rest.netty.low.watermark + hdds.rest.netty.low.watermark 32768 Low watermark configuration to Netty for Object Store REST server. diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/TestArchive.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/TestArchive.java similarity index 99% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/TestArchive.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/TestArchive.java index 86adddb9cef..f53f770fcc9 100644 --- a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/TestArchive.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/TestArchive.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.RandomUtils; diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java similarity index 95% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/package-info.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java index 9c480d6fff3..796694171f4 100644 --- a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/scm/package-info.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.scm; +package org.apache.hadoop.hdds.scm; /** Test cases for SCM client classes. */ \ No newline at end of file diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java similarity index 99% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java index cee66cae165..6b26b60350e 100644 --- a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java @@ -21,36 +21,36 @@ import com.google.common.collect.Lists; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtilClient; -import org.apache.hadoop.utils.BatchOperation; -import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.utils.BatchOperation; import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter; import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter; +import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; -import org.junit.Rule; -import org.junit.Before; import org.junit.After; -import org.junit.Test; import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.ArrayList; import java.util.UUID; -import java.util.Collection; -import java.util.Arrays; -import java.util.Iterator; import java.util.concurrent.atomic.AtomicInteger; -import static org.junit.runners.Parameterized.*; +import static org.junit.runners.Parameterized.Parameters; /** * Test class for ozone metadata store. 
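TestMetadataStore narrows its wildcard static import because Parameters is the only member it needs; in JUnit 4 that annotation marks the data factory of a parameterized test, as in this minimal, hypothetical example:

    import java.util.Arrays;
    import java.util.Collection;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import static org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class StoreImplParameterExample {
      @Parameters
      public static Collection<Object[]> data() {
        // Each array is one constructor invocation; the values are illustrative only.
        return Arrays.asList(new Object[][] {{"LevelDB"}, {"RocksDB"}});
      }

      private final String storeImpl;

      public StoreImplParameterExample(String storeImpl) {
        this.storeImpl = storeImpl;
      }
    }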
diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java similarity index 100% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java similarity index 97% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java index d4c626e9f5f..c1470bb2efc 100644 --- a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java @@ -18,7 +18,8 @@ package org.apache.hadoop.ozone.common; import org.apache.commons.collections.SetUtils; -import org.apache.hadoop.ozone.common.statemachine.InvalidStateTransitionException; +import org.apache.hadoop.ozone.common.statemachine + .InvalidStateTransitionException; import org.apache.hadoop.ozone.common.statemachine.StateMachine; import org.junit.Assert; import org.junit.Rule; @@ -28,12 +29,13 @@ import org.junit.rules.ExpectedException; import java.util.HashSet; import java.util.Set; -import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.INIT; -import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CREATING; -import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.OPERATIONAL; -import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CLOSED; import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CLEANUP; +import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CLOSED; +import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CREATING; import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.FINAL; +import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.INIT; +import static org.apache.hadoop.ozone.common.TestStateMachine.STATES + .OPERATIONAL; /** * This class is to test ozone common state machine. 
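TestStateMachine only has its imports and static imports rewrapped; the state machine it exercises is driven roughly as sketched below, where the constructor shape, addTransition and getNextState are assumptions taken from context rather than from this hunk:

    import java.util.HashSet;
    import org.apache.hadoop.ozone.common.statemachine.InvalidStateTransitionException;
    import org.apache.hadoop.ozone.common.statemachine.StateMachine;

    public final class StateMachineExample {
      enum STATES { INIT, CREATING, OPERATIONAL, CLOSED, CLEANUP, FINAL }
      enum EVENTS { ALLOCATE, CREATE, CLOSE, DELETE }

      private StateMachineExample() { }

      static STATES advance() throws InvalidStateTransitionException {
        StateMachine<STATES, EVENTS> sm =
            new StateMachine<>(STATES.INIT, new HashSet<>());
        sm.addTransition(STATES.INIT, STATES.CREATING, EVENTS.ALLOCATE);
        // A registered transition returns CREATING; an unregistered
        // (state, event) pair throws InvalidStateTransitionException instead.
        return sm.getNextState(STATES.INIT, EVENTS.ALLOCATE);
      }
    }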
diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java similarity index 100% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/package-info.java similarity index 100% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/ozone/package-info.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/package-info.java diff --git a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java similarity index 98% rename from hadoop-hdsl/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java index e4f00f9dfe0..a7ce60bbeb1 100644 --- a/hadoop-hdsl/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java @@ -18,7 +18,7 @@ package org.apache.hadoop.utils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; diff --git a/hadoop-hdsl/container-service/pom.xml b/hadoop-hdds/container-service/pom.xml similarity index 87% rename from hadoop-hdsl/container-service/pom.xml rename to hadoop-hdds/container-service/pom.xml index 7d6d5433963..736272d1a41 100644 --- a/hadoop-hdsl/container-service/pom.xml +++ b/hadoop-hdds/container-service/pom.xml @@ -19,29 +19,29 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-container-service + hadoop-hdds-container-service 3.2.0-SNAPSHOT - Apache Hadoop HDSL Container server - Apache Hadoop HDSL Container server + Apache HDDS Container server + Apache HDDS Container server jar - hdsl + hdds true org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework provided @@ -79,7 +79,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ - ${basedir}/../../hadoop-hdsl/common/src/main/proto/ + ${basedir}/../../hadoop-hdds/common/src/main/proto/ ${basedir}/src/main/proto diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java similarity index 89% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java rename to 
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java index 21123eba96c..956aef26d9c 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java @@ -15,13 +15,12 @@ * the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.base.Optional; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.scm.ScmConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,17 +29,36 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.apache.hadoop.hdsl.HdslUtils.*; -import static org.apache.hadoop.hdsl.server.ServerUtils.sanitizeUserArgs; -import static org.apache.hadoop.scm.ScmConfigKeys.*; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DEADNODE_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DEADNODE_INTERVAL_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_STALENODE_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_STALENODE_INTERVAL_DEFAULT; +import static org.apache.hadoop.hdds.HddsUtils.*; +import static org.apache.hadoop.hdds.server.ServerUtils.sanitizeUserArgs; /** - * Hdsl stateless helper functions for server side components. + * Hdds stateless helper functions for server side components. */ -public class HdslServerUtil { +public class HddsServerUtil { private static final Logger LOG = LoggerFactory.getLogger( - HdslServerUtil.class); + HddsServerUtil.class); /** * Retrieve the socket address that should be used by DataNodes to connect diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java index 6bb3a22683c..4e520466254 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; /** * This is a class that tracks versions of SCM. 
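HddsServerUtil above trades two wildcard static imports for explicit ones, so every ScmConfigKeys constant in use is visible at the import site; new code following the same convention reads like this (the lookup itself is only illustrative):

    import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.conf.OzoneConfiguration;

    public final class StaleNodeIntervalExample {
      private StaleNodeIntervalExample() { }

      static String staleNodeInterval() {
        Configuration conf = new OzoneConfiguration();
        // Explicit static import: the constant's origin is greppable without
        // resolving a ScmConfigKeys.* wildcard. Returns null when the key is unset.
        return conf.get(OZONE_SCM_STALENODE_INTERVAL);
      }
    }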
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java similarity index 86% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java index 58c1d414245..7213e7e2e3f 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java @@ -17,33 +17,33 @@ */ package org.apache.hadoop.ozone; +import com.google.common.base.Preconditions; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdfs.server.datanode.DataNode; +import org.apache.hadoop.hdfs.server.datanode.DataNodeServicePlugin; +import org.apache.hadoop.hdds.HddsUtils; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; +import org.apache.hadoop.ozone.container.common.statemachine + .DatanodeStateMachine; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.UUID; -import com.google.common.base.Preconditions; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdfs.server.datanode.DataNode; -import org.apache.hadoop.hdfs.server.datanode.DataNodeServicePlugin; -import org.apache.hadoop.hdsl.HdslUtils; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; -import org.apache.hadoop.ozone.container.common.statemachine - .DatanodeStateMachine; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** - * Datanode service plugin to start the HDSL container services. + * Datanode service plugin to start the HDDS container services. 
*/ -public class HdslDatanodeService implements DataNodeServicePlugin { +public class HddsDatanodeService implements DataNodeServicePlugin { private static final Logger LOG = LoggerFactory.getLogger( - HdslDatanodeService.class); + HddsDatanodeService.class); private final boolean isOzoneEnabled; @@ -51,11 +51,11 @@ public class HdslDatanodeService implements DataNodeServicePlugin { private DatanodeDetails datanodeDetails; private DatanodeStateMachine datanodeStateMachine; - public HdslDatanodeService() { + public HddsDatanodeService() { try { OzoneConfiguration.activate(); this.conf = new OzoneConfiguration(); - this.isOzoneEnabled = HdslUtils.isHdslEnabled(conf); + this.isOzoneEnabled = HddsUtils.isHddsEnabled(conf); if (isOzoneEnabled) { this.datanodeDetails = getDatanodeDetails(conf); String hostname = DataNode.getHostName(conf); @@ -64,7 +64,7 @@ public class HdslDatanodeService implements DataNodeServicePlugin { this.datanodeDetails.setIpAddress(ip); } } catch (IOException e) { - throw new RuntimeException("Can't start the HDSL datanode plugin", e); + throw new RuntimeException("Can't start the HDDS datanode plugin", e); } } @@ -78,7 +78,7 @@ public class HdslDatanodeService implements DataNodeServicePlugin { datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf); datanodeStateMachine.startDaemon(); } catch (IOException e) { - throw new RuntimeException("Can't start the HDSL datanode plugin", e); + throw new RuntimeException("Can't start the HDDS datanode plugin", e); } } } @@ -90,7 +90,7 @@ public class HdslDatanodeService implements DataNodeServicePlugin { */ private static DatanodeDetails getDatanodeDetails(Configuration conf) throws IOException { - String idFilePath = HdslUtils.getDatanodeIdFilePath(conf); + String idFilePath = HddsUtils.getDatanodeIdFilePath(conf); if (idFilePath == null || idFilePath.isEmpty()) { LOG.error("A valid file path is needed for config setting {}", ScmConfigKeys.OZONE_SCM_DATANODE_ID); diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java similarity index 91% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java index 180a1160577..68bf4421f67 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java @@ -21,12 +21,13 @@ import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; import org.apache.commons.codec.binary.Hex; import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -40,22 +41,22 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.concurrent.ExecutionException; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.CHECKSUM_MISMATCH; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.CONTAINER_INTERNAL_ERROR; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.CONTAINER_NOT_FOUND; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.INVALID_WRITE_SIZE; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.IO_EXCEPTION; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.OVERWRITE_FLAG_REQUIRED; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.UNABLE_TO_FIND_CHUNK; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.UNABLE_TO_FIND_DATA_DIR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CHECKSUM_MISMATCH; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_INTERNAL_ERROR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_NOT_FOUND; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .INVALID_WRITE_SIZE; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .IO_EXCEPTION; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .OVERWRITE_FLAG_REQUIRED; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNABLE_TO_FIND_CHUNK; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNABLE_TO_FIND_DATA_DIR; /** * Set of utility functions used by the chunk Manager. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java index bc28e8f0dcf..c29374c07c6 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java @@ -20,10 +20,10 @@ package org.apache.hadoop.ozone.container.common.helpers; import org.apache.commons.codec.digest.DigestUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.scm.ScmConfigKeys; import org.apache.hadoop.util.Time; import java.io.IOException; @@ -49,7 +49,7 @@ public class ContainerData { private AtomicLong bytesUsed; private long maxSize; private Long containerID; - private HdslProtos.LifeCycleState state; + private HddsProtos.LifeCycleState state; /** * Constructs a ContainerData Object. 
@@ -64,7 +64,7 @@ public class ContainerData { ScmConfigKeys.SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT) * OzoneConsts.GB; this.bytesUsed = new AtomicLong(0L); this.containerID = containerID; - this.state = HdslProtos.LifeCycleState.OPEN; + this.state = HddsProtos.LifeCycleState.OPEN; } /** @@ -135,8 +135,8 @@ public class ContainerData { builder.setState(this.getState()); for (Map.Entry entry : metadata.entrySet()) { - HdslProtos.KeyValue.Builder keyValBuilder = - HdslProtos.KeyValue.newBuilder(); + HddsProtos.KeyValue.Builder keyValBuilder = + HddsProtos.KeyValue.newBuilder(); builder.addMetadata(keyValBuilder.setKey(entry.getKey()) .setValue(entry.getValue()).build()); } @@ -259,11 +259,11 @@ public class ContainerData { return containerID; } - public synchronized void setState(HdslProtos.LifeCycleState state) { + public synchronized void setState(HddsProtos.LifeCycleState state) { this.state = state; } - public synchronized HdslProtos.LifeCycleState getState() { + public synchronized HddsProtos.LifeCycleState getState() { return this.state; } @@ -272,7 +272,7 @@ public class ContainerData { * @return - boolean */ public synchronized boolean isOpen() { - return HdslProtos.LifeCycleState.OPEN == state; + return HddsProtos.LifeCycleState.OPEN == state; } /** @@ -280,7 +280,7 @@ public class ContainerData { */ public synchronized void closeContainer() { // TODO: closed or closing here - setState(HdslProtos.LifeCycleState.CLOSED); + setState(HddsProtos.LifeCycleState.CLOSED); // Some thing brain dead for now. name + Time stamp of when we get the close // container message. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java index 766039efdd4..d4d732b8b69 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java @@ -21,15 +21,15 @@ package org.apache.hadoop.ozone.container.common.helpers; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; -import org.apache.hadoop.metrics2.lib.MutableCounterLong; -import org.apache.hadoop.metrics2.lib.MutableRate; -import org.apache.hadoop.metrics2.lib.MetricsRegistry; -import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.apache.hadoop.metrics2.lib.MetricsRegistry; +import org.apache.hadoop.metrics2.lib.MutableCounterLong; +import org.apache.hadoop.metrics2.lib.MutableQuantiles; +import org.apache.hadoop.metrics2.lib.MutableRate; /** * diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java index 362ef9b8aca..50d2da3975a 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.container.common.helpers; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo; /** * Container Report iterates the closed containers and sends a container report diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java index 6f115be5596..1818188cb68 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java @@ -22,12 +22,13 @@ import com.google.common.base.Preconditions; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileAlreadyExistsException; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; import org.slf4j.Logger; @@ -41,10 +42,10 @@ import java.nio.file.Path; import java.nio.file.Paths; import static org.apache.commons.io.FilenameUtils.removeExtension; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.INVALID_ARGUMENT; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.UNABLE_TO_FIND_DATA_DIR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .INVALID_ARGUMENT; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNABLE_TO_FIND_DATA_DIR; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_META; @@ -412,7 +413,7 @@ public final class 
ContainerUtils { } } try (FileOutputStream out = new FileOutputStream(path)) { - HdslProtos.DatanodeDetailsProto proto = + HddsProtos.DatanodeDetailsProto proto = datanodeDetails.getProtoBufMessage(); proto.writeTo(out); } @@ -432,7 +433,7 @@ public final class ContainerUtils { } try(FileInputStream in = new FileInputStream(path)) { return DatanodeDetails.getFromProtoBuf( - HdslProtos.DatanodeDetailsProto.parseFrom(in)); + HddsProtos.DatanodeDetailsProto.parseFrom(in)); } catch (IOException e) { throw new IOException("Failed to parse DatanodeDetails from " + path.getAbsolutePath(), e); diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java index 6fd7f855225..ade162a2637 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java @@ -17,7 +17,7 @@ package org.apache.hadoop.ozone.container.common.helpers; import com.google.common.collect.Maps; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import java.util.List; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java index 75ec505a3be..566db025103 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java @@ -19,7 +19,7 @@ package org.apache.hadoop.ozone.container.common.helpers; import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; /** * File Utils are helper routines used by putSmallFile and getSmallFile diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java index 691c517d426..33eb911d4e9 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java +++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java @@ -19,18 +19,19 @@ package org.apache.hadoop.ozone.container.common.helpers; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.ozone.container.common.utils.ContainerCache; import org.apache.hadoop.utils.MetadataStore; import java.io.IOException; import java.nio.charset.Charset; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.UNABLE_TO_READ_METADATA_DB; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.NO_SUCH_KEY; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .NO_SUCH_KEY; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNABLE_TO_READ_METADATA_DB; /** * Utils functions to help key functions. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java similarity index 95% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java index 7c950dccf96..b0286b96d40 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java @@ -19,15 +19,16 @@ package org.apache.hadoop.ozone.container.common.impl; import com.google.common.base.Preconditions; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo; import org.apache.hadoop.ozone.container.common.helpers.ChunkUtils; +import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,10 +40,10 @@ import java.nio.file.StandardCopyOption; import java.security.NoSuchAlgorithmException; import java.util.concurrent.ExecutionException; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.CONTAINER_INTERNAL_ERROR; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.UNSUPPORTED_REQUEST; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_INTERNAL_ERROR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNSUPPORTED_REQUEST; /** * An implementation of ChunkManager that is used by default in ozone. diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java new file mode 100644 index 00000000000..5e7375cd9df --- /dev/null +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java @@ -0,0 +1,1113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ozone.container.common.impl; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdfs.server.datanode.StorageLocation; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMStorageReport; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.ozone.OzoneConsts; +import org.apache.hadoop.ozone.container.common.helpers.ContainerData; +import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; +import org.apache.hadoop.ozone.container.common.helpers.KeyData; +import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; +import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerDeletionChoosingPolicy; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerLocationManager; +import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerReportManager; +import org.apache.hadoop.ozone.container.common.interfaces.KeyManager; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.utils.MetadataKeyFilters; +import org.apache.hadoop.utils.MetadataStore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FilenameFilter; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.DigestInputStream; +import java.security.DigestOutputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentNavigableMap; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_EXISTS; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_INTERNAL_ERROR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CONTAINER_NOT_FOUND; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .ERROR_IN_COMPACT_DB; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .INVALID_CONFIG; +import static 
org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .IO_EXCEPTION; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .NO_SUCH_ALGORITHM; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNABLE_TO_READ_METADATA_DB; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNCLOSED_CONTAINER_IO; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .UNSUPPORTED_REQUEST; +import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION; +import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_META; + +/** + * A Generic ContainerManagerImpl that will be called from Ozone + * ContainerManagerImpl. This allows us to support delta changes to ozone + * version without having to rewrite the containerManager. + */ +public class ContainerManagerImpl implements ContainerManager { + static final Logger LOG = + LoggerFactory.getLogger(ContainerManagerImpl.class); + + private final ConcurrentSkipListMap + containerMap = new ConcurrentSkipListMap<>(); + + // Use a non-fair RW lock for better throughput, we may revisit this decision + // if this causes fairness issues. + private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + private ContainerLocationManager locationManager; + private ChunkManager chunkManager; + private KeyManager keyManager; + private Configuration conf; + private DatanodeDetails datanodeDetails; + + private ContainerDeletionChoosingPolicy containerDeletionChooser; + private ContainerReportManager containerReportManager; + + /** + * Init call that sets up a container Manager. + * + * @param config - Configuration. + * @param containerDirs - List of Metadata Container locations. + * @param dnDetails - DatanodeDetails. + * @throws IOException + */ + @Override + public void init( + Configuration config, List containerDirs, + DatanodeDetails dnDetails) throws IOException { + Preconditions.checkNotNull(config, "Config must not be null"); + Preconditions.checkNotNull(containerDirs, "Container directories cannot " + + "be null"); + Preconditions.checkNotNull(dnDetails, "Datanode Details cannot " + + "be null"); + + Preconditions.checkState(containerDirs.size() > 0, "Number of container" + + " directories must be greater than zero."); + + this.conf = config; + this.datanodeDetails = dnDetails; + + readLock(); + try { + containerDeletionChooser = ReflectionUtils.newInstance(conf.getClass( + ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY, + TopNOrderedContainerDeletionChoosingPolicy.class, + ContainerDeletionChoosingPolicy.class), conf); + + for (StorageLocation path : containerDirs) { + File directory = Paths.get(path.getNormalizedUri()).toFile(); + if (!directory.exists() && !directory.mkdirs()) { + LOG.error("Container metadata directory doesn't exist " + + "and cannot be created. Path: {}", path.toString()); + throw new StorageContainerException("Container metadata " + + "directory doesn't exist and cannot be created " + path + .toString(), INVALID_CONFIG); + } + + // TODO: This will fail if any directory is invalid. + // We should fix this to handle invalid directories and continue. + // Leaving it this way to fail fast for time being. + if (!directory.isDirectory()) { + LOG.error("Invalid path to container metadata directory. path: {}", + path.toString()); + throw new StorageContainerException("Invalid path to container " + + "metadata directory." 
+ path, INVALID_CONFIG); + } + LOG.info("Loading containers under {}", path); + File[] files = directory.listFiles(new ContainerFilter()); + if (files != null) { + for (File containerFile : files) { + LOG.debug("Loading container {}", containerFile); + String containerPath = + ContainerUtils.getContainerNameFromFile(containerFile); + Preconditions.checkNotNull(containerPath, "Container path cannot" + + " be null"); + readContainerInfo(containerPath); + } + } + } + + List dataDirs = new LinkedList<>(); + for (String dir : config.getStrings(DFS_DATANODE_DATA_DIR_KEY)) { + StorageLocation location = StorageLocation.parse(dir); + dataDirs.add(location); + } + this.locationManager = + new ContainerLocationManagerImpl(containerDirs, dataDirs, config); + + this.containerReportManager = + new ContainerReportManagerImpl(config); + } finally { + readUnlock(); + } + } + + /** + * Reads the Container Info from a file and verifies that checksum match. If + * the checksums match, then that file is added to containerMap. + * + * @param containerName - Name which points to the persisted container. + * @throws StorageContainerException + */ + private void readContainerInfo(String containerName) + throws StorageContainerException { + Preconditions.checkState(containerName.length() > 0, + "Container name length cannot be zero."); + FileInputStream containerStream = null; + DigestInputStream dis = null; + FileInputStream metaStream = null; + Path cPath = Paths.get(containerName).getFileName(); + String keyName = null; + if (cPath != null) { + keyName = cPath.toString(); + } + Preconditions.checkNotNull(keyName, + "Container Name to container key mapping is null"); + + try { + String containerFileName = containerName.concat(CONTAINER_EXTENSION); + String metaFileName = containerName.concat(CONTAINER_META); + + containerStream = new FileInputStream(containerFileName); + + metaStream = new FileInputStream(metaFileName); + + MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH); + + dis = new DigestInputStream(containerStream, sha); + + ContainerProtos.ContainerData containerDataProto = + ContainerProtos.ContainerData.parseDelimitedFrom(dis); + ContainerData containerData; + if (containerDataProto == null) { + // Sometimes container metadata might have been created but empty, + // when loading the info we get a null, this often means last time + // SCM was ending up at some middle phase causing that the metadata + // was not populated. Such containers are marked as inactive. + containerMap.put(keyName, new ContainerStatus(null)); + return; + } + containerData = ContainerData.getFromProtBuf(containerDataProto, conf); + ContainerProtos.ContainerMeta meta = + ContainerProtos.ContainerMeta.parseDelimitedFrom(metaStream); + if (meta != null && !DigestUtils.sha256Hex(sha.digest()) + .equals(meta.getHash())) { + // This means we were not able read data from the disk when booted the + // datanode. We are going to rely on SCM understanding that we don't + // have valid data for this container when we send container reports. + // Hopefully SCM will ask us to delete this container and rebuild it. + LOG.error("Invalid SHA found for container data. Name :{}" + + "cowardly refusing to read invalid data", containerName); + containerMap.put(keyName, new ContainerStatus(null)); + return; + } + + ContainerStatus containerStatus = new ContainerStatus(containerData); + // Initialize pending deletion blocks count in in-memory + // container status. 
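+ // Keys still carrying the deleting-key prefix are re-counted here so the + // in-memory pending-deletion metric stays correct across datanode restarts.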
+ MetadataStore metadata = KeyUtils.getDB(containerData, conf); + List> underDeletionBlocks = metadata + .getSequentialRangeKVs(null, Integer.MAX_VALUE, + MetadataKeyFilters.getDeletingKeyFilter()); + containerStatus.incrPendingDeletionBlocks(underDeletionBlocks.size()); + + List> liveKeys = metadata + .getRangeKVs(null, Integer.MAX_VALUE, + MetadataKeyFilters.getNormalKeyFilter()); + + // Get container bytesUsed upon loading container + // The in-memory state is updated upon key write or delete + // TODO: update containerDataProto and persist it into container MetaFile + long bytesUsed = 0; + bytesUsed = liveKeys.parallelStream().mapToLong(e-> { + KeyData keyData; + try { + keyData = KeyUtils.getKeyData(e.getValue()); + return keyData.getSize(); + } catch (IOException ex) { + return 0L; + } + }).sum(); + containerStatus.setBytesUsed(bytesUsed); + + containerMap.put(keyName, containerStatus); + } catch (IOException | NoSuchAlgorithmException ex) { + LOG.error("read failed for file: {} ex: {}", containerName, + ex.getMessage()); + + // TODO : Add this file to a recovery Queue. + + // Remember that this container is busted and we cannot use it. + containerMap.put(keyName, new ContainerStatus(null)); + throw new StorageContainerException("Unable to read container info", + UNABLE_TO_READ_METADATA_DB); + } finally { + IOUtils.closeStream(dis); + IOUtils.closeStream(containerStream); + IOUtils.closeStream(metaStream); + } + } + + /** + * Creates a container with the given name. + * + * @param pipeline -- Nodes which make up this container. + * @param containerData - Container Name and metadata. + * @throws StorageContainerException - Exception + */ + @Override + public void createContainer(Pipeline pipeline, ContainerData containerData) + throws StorageContainerException { + Preconditions.checkNotNull(containerData, "Container data cannot be null"); + writeLock(); + try { + if (containerMap.containsKey(containerData.getName())) { + LOG.debug("container already exists. {}", containerData.getName()); + throw new StorageContainerException("container already exists.", + CONTAINER_EXISTS); + } + + // This is by design. We first write and close the + // container Info and metadata to a directory. + // Then read back and put that info into the containerMap. + // This allows us to make sure that our write is consistent. + + writeContainerInfo(containerData, false); + File cFile = new File(containerData.getContainerPath()); + readContainerInfo(ContainerUtils.getContainerNameFromFile(cFile)); + } catch (NoSuchAlgorithmException ex) { + LOG.error("Internal error: We seem to be running a JVM without a " + + "needed hash algorithm."); + throw new StorageContainerException("failed to create container", + NO_SUCH_ALGORITHM); + } finally { + writeUnlock(); + } + + } + + /** + * Writes a container to a chosen location and updates the container Map. + * + * The file formats of ContainerData and Container Meta is the following. + * + * message ContainerData { + * required string name = 1; + * repeated KeyValue metadata = 2; + * optional string dbPath = 3; + * optional string containerPath = 4; + * optional int64 bytesUsed = 5; + * optional int64 size = 6; + * } + * + * message ContainerMeta { + * required string fileName = 1; + * required string hash = 2; + * } + * + * @param containerData - container Data + * @param overwrite - Whether we are overwriting. 
+ * @throws StorageContainerException, NoSuchAlgorithmException + */ + private void writeContainerInfo(ContainerData containerData, + boolean overwrite) + throws StorageContainerException, NoSuchAlgorithmException { + + Preconditions.checkNotNull(this.locationManager, + "Internal error: location manager cannot be null"); + + FileOutputStream containerStream = null; + DigestOutputStream dos = null; + FileOutputStream metaStream = null; + + try { + Path metadataPath = null; + Path location = (!overwrite) ? locationManager.getContainerPath(): + Paths.get(containerData.getContainerPath()).getParent(); + if (location == null) { + throw new StorageContainerException( + "Failed to get container file path.", + CONTAINER_INTERNAL_ERROR); + } + + File containerFile = ContainerUtils.getContainerFile(containerData, + location); + File metadataFile = ContainerUtils.getMetadataFile(containerData, + location); + String containerName = containerData.getContainerName(); + + if(!overwrite) { + ContainerUtils.verifyIsNewContainer(containerFile, metadataFile); + metadataPath = this.locationManager.getDataPath(containerName); + metadataPath = ContainerUtils.createMetadata(metadataPath, + containerName, conf); + } else { + metadataPath = ContainerUtils.getMetadataDirectory(containerData); + } + + containerStream = new FileOutputStream(containerFile); + metaStream = new FileOutputStream(metadataFile); + MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH); + + dos = new DigestOutputStream(containerStream, sha); + containerData.setDBPath(metadataPath.resolve( + ContainerUtils.getContainerDbFileName(containerName)) + .toString()); + containerData.setContainerPath(containerFile.toString()); + + ContainerProtos.ContainerData protoData = containerData + .getProtoBufMessage(); + protoData.writeDelimitedTo(dos); + + ContainerProtos.ContainerMeta protoMeta = ContainerProtos + .ContainerMeta.newBuilder() + .setFileName(containerFile.toString()) + .setHash(DigestUtils.sha256Hex(sha.digest())) + .build(); + protoMeta.writeDelimitedTo(metaStream); + + } catch (IOException ex) { + // TODO : we need to clean up partially constructed files + // The proper way to do would be for a thread + // to read all these 3 artifacts and make sure they are + // sane. That info needs to come from the replication + // pipeline, and if not consistent delete these file. + + // In case of ozone this is *not* a deal breaker since + // SCM is guaranteed to generate unique container names. + // The saving grace is that we check if we have residue files + // lying around when creating a new container. We need to queue + // this information to a cleaner thread. + + LOG.error("Creation of container failed. Name: {}, we might need to " + + "cleanup partially created artifacts. ", + containerData.getContainerName(), ex); + throw new StorageContainerException("Container creation failed. ", + ex, CONTAINER_INTERNAL_ERROR); + } finally { + IOUtils.closeStream(dos); + IOUtils.closeStream(containerStream); + IOUtils.closeStream(metaStream); + } + } + + /** + * Deletes an existing container. + * + * @param pipeline - nodes that make this container. + * @param containerName - name of the container. + * @param forceDelete - whether this container should be deleted forcibly. 
+ * @throws StorageContainerException + */ + @Override + public void deleteContainer(Pipeline pipeline, String containerName, + boolean forceDelete) throws StorageContainerException { + Preconditions.checkNotNull(containerName, "Container name cannot be null"); + Preconditions.checkState(containerName.length() > 0, + "Container name length cannot be zero."); + writeLock(); + try { + if (isOpen(pipeline.getContainerName())) { + throw new StorageContainerException( + "Deleting an open container is not allowed.", + UNCLOSED_CONTAINER_IO); + } + + ContainerStatus status = containerMap.get(containerName); + if (status == null) { + LOG.debug("No such container. Name: {}", containerName); + throw new StorageContainerException("No such container. Name : " + + containerName, CONTAINER_NOT_FOUND); + } + if (status.getContainer() == null) { + LOG.debug("Invalid container data. Name: {}", containerName); + throw new StorageContainerException("Invalid container data. Name : " + + containerName, CONTAINER_NOT_FOUND); + } + ContainerUtils.removeContainer(status.getContainer(), conf, forceDelete); + containerMap.remove(containerName); + } catch (StorageContainerException e) { + throw e; + } catch (IOException e) { + // TODO : An I/O error during delete can leave partial artifacts on the + // disk. We will need the cleaner thread to cleanup this information. + LOG.error("Failed to cleanup container. Name: {}", containerName, e); + throw new StorageContainerException(containerName, e, IO_EXCEPTION); + } finally { + writeUnlock(); + } + } + + /** + * A simple interface for container Iterations. + *
* <p/>
+ * This call make no guarantees about consistency of the data between + * different list calls. It just returns the best known data at that point of + * time. It is possible that using this iteration you can miss certain + * container from the listing. + * + * @param prefix - Return keys that match this prefix. + * @param count - how many to return + * @param prevKey - Previous Key Value or empty String. + * @param data - Actual containerData + * @throws StorageContainerException + */ + @Override + public void listContainer(String prefix, long count, String prevKey, + List data) throws StorageContainerException { + // TODO : Support list with Prefix and PrevKey + Preconditions.checkNotNull(data, + "Internal assertion: data cannot be null"); + readLock(); + try { + ConcurrentNavigableMap map; + if (prevKey == null || prevKey.isEmpty()) { + map = containerMap.tailMap(containerMap.firstKey(), true); + } else { + map = containerMap.tailMap(prevKey, false); + } + + int currentCount = 0; + for (ContainerStatus entry : map.values()) { + if (currentCount < count) { + data.add(entry.getContainer()); + currentCount++; + } else { + return; + } + } + } finally { + readUnlock(); + } + } + + /** + * Get metadata about a specific container. + * + * @param containerName - Name of the container + * @return ContainerData - Container Data. + * @throws StorageContainerException + */ + @Override + public ContainerData readContainer(String containerName) throws + StorageContainerException { + Preconditions.checkNotNull(containerName, "Container name cannot be null"); + Preconditions.checkState(containerName.length() > 0, + "Container name length cannot be zero."); + if (!containerMap.containsKey(containerName)) { + throw new StorageContainerException("Unable to find the container. Name: " + + containerName, CONTAINER_NOT_FOUND); + } + ContainerData cData = containerMap.get(containerName).getContainer(); + if (cData == null) { + throw new StorageContainerException("Invalid container data. Name: " + + containerName, CONTAINER_INTERNAL_ERROR); + } + return cData; + } + + /** + * Closes a open container, if it is already closed or does not exist a + * StorageContainerException is thrown. + * + * @param containerName - Name of the container. + * @throws StorageContainerException + */ + @Override + public void closeContainer(String containerName) + throws StorageContainerException, NoSuchAlgorithmException { + ContainerData containerData = readContainer(containerName); + containerData.closeContainer(); + writeContainerInfo(containerData, true); + MetadataStore db = KeyUtils.getDB(containerData, conf); + + // It is ok if this operation takes a bit of time. + // Close container is not expected to be instantaneous. + try { + db.compactDB(); + } catch (IOException e) { + LOG.error("Error in DB compaction while closing container", e); + throw new StorageContainerException(e, ERROR_IN_COMPACT_DB); + } + + // Active is different from closed. Closed means it is immutable, active + // false means we have some internal error that is happening to this + // container. This is a way to track damaged containers if we have an + // I/O failure, this allows us to take quick action in case of container + // issues. 
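+ // Swap in a fresh ContainerStatus wrapping the now-closed ContainerData so + // readers of the container map immediately observe the CLOSED state.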
+ + ContainerStatus status = new ContainerStatus(containerData); + containerMap.put(containerName, status); + } + + @Override + public void updateContainer(Pipeline pipeline, String containerName, + ContainerData data, boolean forceUpdate) + throws StorageContainerException { + Preconditions.checkNotNull(pipeline, "Pipeline cannot be null"); + Preconditions.checkNotNull(containerName, "Container name cannot be null"); + Preconditions.checkNotNull(data, "Container data cannot be null"); + FileOutputStream containerStream = null; + DigestOutputStream dos = null; + MessageDigest sha = null; + File containerFileBK = null, containerFile = null; + boolean deleted = false; + + if(!containerMap.containsKey(containerName)) { + throw new StorageContainerException("Container doesn't exist. Name :" + + containerName, CONTAINER_NOT_FOUND); + } + + try { + sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH); + } catch (NoSuchAlgorithmException e) { + throw new StorageContainerException("Unable to create Message Digest," + + " usually this is a java configuration issue.", + NO_SUCH_ALGORITHM); + } + + try { + Path location = locationManager.getContainerPath(); + ContainerData orgData = containerMap.get(containerName).getContainer(); + if (orgData == null) { + // updating a invalid container + throw new StorageContainerException("Update a container with invalid" + + "container meta data", CONTAINER_INTERNAL_ERROR); + } + + if (!forceUpdate && !orgData.isOpen()) { + throw new StorageContainerException( + "Update a closed container is not allowed. Name: " + containerName, + UNSUPPORTED_REQUEST); + } + + containerFile = ContainerUtils.getContainerFile(orgData, location); + // If forceUpdate is true, there is no need to check + // whether the container file exists. + if (!forceUpdate) { + if (!containerFile.exists() || !containerFile.canWrite()) { + throw new StorageContainerException( + "Container file not exists or corrupted. Name: " + containerName, + CONTAINER_INTERNAL_ERROR); + } + + // Backup the container file + containerFileBK = File.createTempFile( + "tmp_" + System.currentTimeMillis() + "_", + containerFile.getName(), containerFile.getParentFile()); + FileUtils.copyFile(containerFile, containerFileBK); + + deleted = containerFile.delete(); + containerStream = new FileOutputStream(containerFile); + dos = new DigestOutputStream(containerStream, sha); + + ContainerProtos.ContainerData protoData = data.getProtoBufMessage(); + protoData.writeDelimitedTo(dos); + } + + // Update the in-memory map + ContainerStatus newStatus = new ContainerStatus(data); + containerMap.replace(containerName, newStatus); + } catch (IOException e) { + // Restore the container file from backup + if(containerFileBK != null && containerFileBK.exists() && deleted) { + if(containerFile.delete() + && containerFileBK.renameTo(containerFile)) { + throw new StorageContainerException("Container update failed," + + " container data restored from the backup.", + CONTAINER_INTERNAL_ERROR); + } else { + throw new StorageContainerException( + "Failed to restore container data from the backup. 
Name: " + + containerName, CONTAINER_INTERNAL_ERROR); + } + } else { + throw new StorageContainerException( + e.getMessage(), CONTAINER_INTERNAL_ERROR); + } + } finally { + if (containerFileBK != null && containerFileBK.exists()) { + if(!containerFileBK.delete()) { + LOG.warn("Unable to delete container file backup : {}.", + containerFileBK.getAbsolutePath()); + } + } + IOUtils.closeStream(dos); + IOUtils.closeStream(containerStream); + } + } + + @VisibleForTesting + protected File getContainerFile(ContainerData data) throws IOException { + return ContainerUtils.getContainerFile(data, + this.locationManager.getContainerPath()); + } + + /** + * Checks if a container exists. + * + * @param containerName - Name of the container. + * @return true if the container is open false otherwise. + * @throws StorageContainerException - Throws Exception if we are not able to + * find the container. + */ + @Override + public boolean isOpen(String containerName) throws StorageContainerException { + final ContainerStatus status = containerMap.get(containerName); + if (status == null) { + throw new StorageContainerException( + "Container status not found: " + containerName, CONTAINER_NOT_FOUND); + } + final ContainerData cData = status.getContainer(); + if (cData == null) { + throw new StorageContainerException( + "Container not found: " + containerName, CONTAINER_NOT_FOUND); + } + return cData.isOpen(); + } + + /** + * Supports clean shutdown of container. + * + * @throws IOException + */ + @Override + public void shutdown() throws IOException { + Preconditions.checkState(this.hasWriteLock(), + "Assumption that we are holding the lock violated."); + this.containerMap.clear(); + this.locationManager.shutdown(); + } + + + @VisibleForTesting + public ConcurrentSkipListMap getContainerMap() { + return containerMap; + } + + /** + * Acquire read lock. + */ + @Override + public void readLock() { + this.lock.readLock().lock(); + + } + + @Override + public void readLockInterruptibly() throws InterruptedException { + this.lock.readLock().lockInterruptibly(); + } + + /** + * Release read lock. + */ + @Override + public void readUnlock() { + this.lock.readLock().unlock(); + } + + /** + * Check if the current thread holds read lock. + */ + @Override + public boolean hasReadLock() { + return this.lock.readLock().tryLock(); + } + + /** + * Acquire write lock. + */ + @Override + public void writeLock() { + this.lock.writeLock().lock(); + } + + /** + * Acquire write lock, unless interrupted while waiting. + */ + @Override + public void writeLockInterruptibly() throws InterruptedException { + this.lock.writeLock().lockInterruptibly(); + + } + + /** + * Release write lock. + */ + @Override + public void writeUnlock() { + this.lock.writeLock().unlock(); + + } + + /** + * Check if the current thread holds write lock. + */ + @Override + public boolean hasWriteLock() { + return this.lock.writeLock().isHeldByCurrentThread(); + } + + public ChunkManager getChunkManager() { + return this.chunkManager; + } + + /** + * Sets the chunk Manager. + * + * @param chunkManager - Chunk Manager + */ + public void setChunkManager(ChunkManager chunkManager) { + this.chunkManager = chunkManager; + } + + /** + * Gets the Key Manager. + * + * @return KeyManager. + */ + @Override + public KeyManager getKeyManager() { + return this.keyManager; + } + + /** + * Get the node report. + * @return node report. 
+ */ + @Override + public SCMNodeReport getNodeReport() throws IOException { + StorageLocationReport[] reports = locationManager.getLocationReport(); + SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder(); + for (int i = 0; i < reports.length; i++) { + SCMStorageReport.Builder srb = SCMStorageReport.newBuilder(); + nrb.addStorageReport(i, srb.setStorageUuid(reports[i].getId()) + .setCapacity(reports[i].getCapacity()) + .setScmUsed(reports[i].getScmUsed()) + .setRemaining(reports[i].getRemaining()) + .build()); + } + return nrb.build(); + } + + + /** + * Gets container reports. + * + * @return List of all closed containers. + * @throws IOException + */ + @Override + public List getContainerReports() throws IOException { + LOG.debug("Starting container report iteration."); + // No need for locking since containerMap is a ConcurrentSkipListMap + // And we can never get the exact state since close might happen + // after we iterate a point. + return containerMap.entrySet().stream() + .filter(containerStatus -> + !containerStatus.getValue().getContainer().isOpen()) + .map(containerStatus -> containerStatus.getValue().getContainer()) + .collect(Collectors.toList()); + } + + /** + * Get container report. + * + * @return The container report. + * @throws IOException + */ + @Override + public ContainerReportsRequestProto getContainerReport() throws IOException { + LOG.debug("Starting container report iteration."); + // No need for locking since containerMap is a ConcurrentSkipListMap + // And we can never get the exact state since close might happen + // after we iterate a point. + List containers = containerMap.values().stream() + .collect(Collectors.toList()); + + ContainerReportsRequestProto.Builder crBuilder = + ContainerReportsRequestProto.newBuilder(); + + // TODO: support delta based container report + crBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage()) + .setType(ContainerReportsRequestProto.reportType.fullReport); + + for (ContainerStatus container: containers) { + StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder = + StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder(); + ciBuilder.setContainerName(container.getContainer().getContainerName()) + .setSize(container.getContainer().getMaxSize()) + .setUsed(container.getContainer().getBytesUsed()) + .setKeyCount(container.getContainer().getKeyCount()) + .setReadCount(container.getReadCount()) + .setWriteCount(container.getWriteCount()) + .setReadBytes(container.getReadBytes()) + .setWriteBytes(container.getWriteBytes()) + .setContainerID(container.getContainer().getContainerID()); + + if (container.getContainer().getHash() != null) { + ciBuilder.setFinalhash(container.getContainer().getHash()); + } + crBuilder.addReports(ciBuilder.build()); + } + + return crBuilder.build(); + } + + /** + * Sets the Key Manager. + * + * @param keyManager - Key Manager. + */ + @Override + public void setKeyManager(KeyManager keyManager) { + this.keyManager = keyManager; + } + + /** + * Filter out only container files from the container metadata dir. + */ + private static class ContainerFilter implements FilenameFilter { + /** + * Tests if a specified file should be included in a file list. + * + * @param dir the directory in which the file was found. + * @param name the name of the file. + * @return true if and only if the name should be included in + * the file list; false otherwise. 
+ */ + @Override + public boolean accept(File dir, String name) { + return name.endsWith(CONTAINER_EXTENSION); + } + } + + @Override + public List chooseContainerForBlockDeletion( + int count) throws StorageContainerException { + readLock(); + try { + return containerDeletionChooser.chooseContainerForBlockDeletion( + count, containerMap); + } finally { + readUnlock(); + } + } + + @VisibleForTesting + public ContainerDeletionChoosingPolicy getContainerDeletionChooser() { + return containerDeletionChooser; + } + + @Override + public void incrPendingDeletionBlocks(int numBlocks, String containerId) { + writeLock(); + try { + ContainerStatus status = containerMap.get(containerId); + status.incrPendingDeletionBlocks(numBlocks); + } finally { + writeUnlock(); + } + } + + @Override + public void decrPendingDeletionBlocks(int numBlocks, String containerId) { + writeLock(); + try { + ContainerStatus status = containerMap.get(containerId); + status.decrPendingDeletionBlocks(numBlocks); + } finally { + writeUnlock(); + } + } + + /** + * Increase the read count of the container. + * + * @param containerName - Name of the container. + */ + @Override + public void incrReadCount(String containerName) { + ContainerStatus status = containerMap.get(containerName); + status.incrReadCount(); + } + + public long getReadCount(String containerName) { + ContainerStatus status = containerMap.get(containerName); + return status.getReadCount(); + } + + /** + * Increse the read counter for bytes read from the container. + * + * @param containerName - Name of the container. + * @param readBytes - bytes read from the container. + */ + @Override + public void incrReadBytes(String containerName, long readBytes) { + ContainerStatus status = containerMap.get(containerName); + status.incrReadBytes(readBytes); + } + + public long getReadBytes(String containerName) { + readLock(); + try { + ContainerStatus status = containerMap.get(containerName); + return status.getReadBytes(); + } finally { + readUnlock(); + } + } + + /** + * Increase the write count of the container. + * + * @param containerName - Name of the container. + */ + @Override + public void incrWriteCount(String containerName) { + ContainerStatus status = containerMap.get(containerName); + status.incrWriteCount(); + } + + public long getWriteCount(String containerName) { + ContainerStatus status = containerMap.get(containerName); + return status.getWriteCount(); + } + + /** + * Increse the write counter for bytes write into the container. + * + * @param containerName - Name of the container. + * @param writeBytes - bytes write into the container. + */ + @Override + public void incrWriteBytes(String containerName, long writeBytes) { + ContainerStatus status = containerMap.get(containerName); + status.incrWriteBytes(writeBytes); + } + + public long getWriteBytes(String containerName) { + ContainerStatus status = containerMap.get(containerName); + return status.getWriteBytes(); + } + + /** + * Increase the bytes used by the container. + * + * @param containerName - Name of the container. + * @param used - additional bytes used by the container. + * @return the current bytes used. + */ + @Override + public long incrBytesUsed(String containerName, long used) { + ContainerStatus status = containerMap.get(containerName); + return status.incrBytesUsed(used); + } + + /** + * Decrease the bytes used by the container. + * + * @param containerName - Name of the container. + * @param used - additional bytes reclaimed by the container. + * @return the current bytes used. 
+ */ + @Override + public long decrBytesUsed(String containerName, long used) { + ContainerStatus status = containerMap.get(containerName); + return status.decrBytesUsed(used); + } + + public long getBytesUsed(String containerName) { + ContainerStatus status = containerMap.get(containerName); + return status.getBytesUsed(); + } + + /** + * Get the number of keys in the container. + * + * @param containerName - Name of the container. + * @return the current key count. + */ + @Override + public long getNumKeys(String containerName) { + ContainerStatus status = containerMap.get(containerName); + return status.getNumKeys(); } + + /** + * Get the container report state to send via HB to SCM. + * + * @return container report state. + */ + @Override + public ReportState getContainerReportState() { + return containerReportManager.getContainerReportState(); + } + +} diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java similarity index 92% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java index a300767706e..6c83c66a3a8 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java @@ -18,18 +18,19 @@ package org.apache.hadoop.ozone.container.common.impl; import org.apache.commons.lang3.RandomUtils; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmHeartbeatInterval; - import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.ozone.container.common.interfaces.ContainerReportManager; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerReportManager; import org.apache.hadoop.util.Time; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval; + /** * Class wraps the container report operations on datanode. 
* // TODO: support incremental/delta container report diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java index 07a3a539e73..7293895295c 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java @@ -32,10 +32,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; -import java.io.IOException; import java.io.FileNotFoundException; -import java.io.OutputStreamWriter; import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; import java.net.URI; import java.nio.charset.StandardCharsets; import java.nio.file.Paths; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java index a1690b5a6d3..1c6e39ccdfc 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java @@ -21,12 +21,15 @@ package org.apache.hadoop.ozone.container.common.impl; import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos .ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Type; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type; import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo; import 
org.apache.hadoop.ozone.container.common.helpers.ChunkUtils; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; @@ -37,8 +40,6 @@ import org.apache.hadoop.ozone.container.common.helpers.KeyData; import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,10 +48,14 @@ import java.security.NoSuchAlgorithmException; import java.util.LinkedList; import java.util.List; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Result.CLOSED_CONTAINER_IO; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Result.GET_SMALL_FILE_ERROR; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Result.NO_SUCH_ALGORITHM; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Result.PUT_SMALL_FILE_ERROR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .CLOSED_CONTAINER_IO; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .GET_SMALL_FILE_ERROR; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .NO_SUCH_ALGORITHM; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .PUT_SMALL_FILE_ERROR; /** * Ozone Container dispatcher takes a call from the netty server and routes it diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java similarity index 95% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java index c195ad22a0b..cf6bf12214d 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java @@ -20,16 +20,17 @@ package org.apache.hadoop.ozone.container.common.impl; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.helpers.KeyData; import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; import org.apache.hadoop.ozone.container.common.interfaces.KeyManager; import org.apache.hadoop.ozone.container.common.utils.ContainerCache; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter; import 
org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter; import org.apache.hadoop.utils.MetadataStore; @@ -41,8 +42,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos - .Result.NO_SUCH_KEY; +import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result + .NO_SUCH_KEY; /** * Key Manager impl. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java similarity index 92% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java index 0123ab1b445..3e267d2b37e 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.ozone.container.common.impl; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - +import com.google.common.base.Preconditions; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.ozone.container.common.interfaces.ContainerDeletionChoosingPolicy; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerDeletionChoosingPolicy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Preconditions; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; /** * Randomly choosing containers for block deletion. 
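The class renamed above picks block-deletion candidates purely at random. As a rough illustration of that idea only (simplified, hypothetical names such as RandomDeletionPolicySketch and choose, not the code carried by this patch), such a policy reduces to a shuffle followed by a bounded take:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    /** Illustrative sketch only: pick up to 'count' container names in random order. */
    final class RandomDeletionPolicySketch {
      static List<String> choose(int count, List<String> candidateContainers) {
        // Copy first so the caller's list is left untouched.
        List<String> shuffled = new ArrayList<>(candidateContainers);
        Collections.shuffle(shuffled); // no ordering or weighting, purely random
        return new ArrayList<>(shuffled.subList(0, Math.min(count, shuffled.size())));
      }
    }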
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java similarity index 94% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java index 3f4cdaa2c5e..0169a96cf98 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java @@ -17,20 +17,21 @@ */ package org.apache.hadoop.ozone.container.common.impl; +import com.google.common.base.Preconditions; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.ozone.container.common.helpers.ContainerData; +import org.apache.hadoop.ozone.container.common.interfaces + .ContainerDeletionChoosingPolicy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.Collections; import java.util.Comparator; import java.util.LinkedList; import java.util.List; import java.util.Map; -import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.ozone.container.common.interfaces.ContainerDeletionChoosingPolicy; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Preconditions; - /** * TopN Ordered choosing policy that choosing containers based on pending * deletion blocks' number. 
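The TopN variant renamed above instead prefers the containers with the most blocks awaiting deletion. A comparable hedged sketch, again with hypothetical names rather than the patched class:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;

    /** Illustrative sketch only: containers with the largest pending-deletion backlog come first. */
    final class TopNDeletionPolicySketch {
      static List<String> choose(int count, Map<String, Integer> pendingBlocksByContainer) {
        List<Map.Entry<String, Integer>> byPending =
            new ArrayList<>(pendingBlocksByContainer.entrySet());
        // Sort descending by pending-deletion block count.
        byPending.sort(
            Comparator.comparingInt((Map.Entry<String, Integer> e) -> e.getValue()).reversed());
        List<String> chosen = new ArrayList<>();
        for (Map.Entry<String, Integer> entry : byPending) {
          if (chosen.size() >= count || entry.getValue() == 0) {
            break; // quota reached, or nothing left worth deleting
          }
          chosen.add(entry.getKey());
        }
        return chosen;
      }
    }

Random selection spreads deletion work evenly across containers, while the TopN ordering reclaims space from the most backlogged containers first; ContainerManagerImpl.init above wires in whichever policy ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY names, with the TopN policy as the default.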
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java similarity index 91% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java index a7d3fe536da..f55d74ca2b2 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java @@ -18,10 +18,11 @@ package org.apache.hadoop.ozone.container.common.interfaces; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; /** * Chunk Manager allows read, write, delete and listing of chunks in diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java similarity index 94% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java index 3e0a28331e1..f7280e2a3c6 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.ozone.container.common.interfaces; -import java.util.List; -import java.util.Map; - +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.impl.ContainerStatus; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; + +import java.util.List; +import java.util.Map; /** * This interface is used for choosing desired containers for diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java similarity index 88% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java index cb3fdeadd6f..984fe41b91e 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java @@ -18,8 +18,10 @@ package org.apache.hadoop.ozone.container.common.interfaces; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandResponseProto; /** * Dispatcher acts as the bridge between the transport layer and diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java index 3f19992b738..2ff636e87fd 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java @@ -21,17 +21,20 @@ package org.apache.hadoop.ozone.container.common.interfaces; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import 
org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.hdfs.server.datanode.StorageLocation; import org.apache.hadoop.hdfs.util.RwLock; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; - import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.util.List; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java similarity index 94% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java index 6485d3f46a5..4689dfe9e1c 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.ozone.container.common.interfaces; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; /** * Interface for container report manager operations. 
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java index a613d2aca0f..8c27ba94c42 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.ozone.container.common.interfaces; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.ozone.container.common.helpers.KeyData; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import java.io.IOException; import java.util.List; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java index 91fa9c37585..8e9482f5653 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java @@ -19,19 +19,20 @@ package org.apache.hadoop.ozone.container.common.statemachine; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import 
org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.container.common.statemachine.commandhandler .CloseContainerHandler; -import org.apache.hadoop.ozone.container.common.statemachine.commandhandler.CommandDispatcher; -import org.apache.hadoop.ozone.container.common.statemachine.commandhandler.ContainerReportHandler; -import org.apache.hadoop.ozone.container.common.statemachine.commandhandler.DeleteBlocksCommandHandler; +import org.apache.hadoop.ozone.container.common.statemachine.commandhandler + .CommandDispatcher; +import org.apache.hadoop.ozone.container.common.statemachine.commandhandler + .ContainerReportHandler; +import org.apache.hadoop.ozone.container.common.statemachine.commandhandler + .DeleteBlocksCommandHandler; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.HadoopExecutors; - -import static org.apache.hadoop.ozone.scm.HdslServerUtil.getScmHeartbeatInterval; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,6 +42,8 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval; + /** * State Machine Class. */ @@ -220,7 +223,7 @@ public class DatanodeStateMachine implements Closeable { private final int value; /** - * Constructs ContainerStates. + * Constructs states. * * @param value Enum Value */ diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java index 61bc91e318a..7e85923d315 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java @@ -20,10 +20,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.protocol.VersionResponse; import org.apache.hadoop.ozone.protocolPB .StorageContainerDatanodeProtocolClientSideTranslatorPB; - -import static org.apache.hadoop.ozone.scm.HdslServerUtil.getLogWarnInterval; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmHeartbeatInterval; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,6 +31,9 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import static org.apache.hadoop.hdds.scm.HddsServerUtil.getLogWarnInterval; +import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval; + /** * Endpoint is used as holder class that keeps state around the RPC endpoint. 
*/ diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java index c9f83c61c5b..19722f04a52 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java @@ -26,9 +26,6 @@ import org.apache.hadoop.ozone.protocolPB .StorageContainerDatanodeProtocolClientSideTranslatorPB; import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB; import org.apache.hadoop.security.UserGroupInformation; - -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmRpcTimeOutInMilliseconds; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,10 +33,18 @@ import javax.management.ObjectName; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static org.apache.hadoop.hdds.scm.HddsServerUtil + .getScmRpcTimeOutInMilliseconds; + /** * SCMConnectionManager - Acts as a class that manages the membership * information of the SCMs that we are working with. 
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java similarity index 94% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java index e4d6cd997c8..55476fd41e1 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java @@ -17,12 +17,16 @@ package org.apache.hadoop.ozone.container.common.statemachine; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ozone.container.common.states.datanode.InitDatanodeState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; import org.apache.hadoop.ozone.container.common.states.DatanodeState; -import org.apache.hadoop.ozone.container.common.states.datanode.RunningDatanodeState; +import org.apache.hadoop.ozone.container.common.states.datanode + .InitDatanodeState; +import org.apache.hadoop.ozone.container.common.states.datanode + .RunningDatanodeState; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,10 +40,10 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT; -import static org.apache.hadoop.hdsl.protocol.proto +import static org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState.states .noContainerReports; +import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT; /** * Current Context of State Machine. 
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java index d5df699293a..ac95b2a12cb 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java @@ -22,39 +22,40 @@ import com.google.common.collect.Lists; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.container.common.helpers + .StorageContainerException; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.scm.container.common.helpers.StorageContainerException; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BackgroundService; -import org.apache.hadoop.utils.BackgroundTaskResult; -import org.apache.hadoop.utils.BackgroundTaskQueue; import org.apache.hadoop.utils.BackgroundTask; +import org.apache.hadoop.utils.BackgroundTaskQueue; +import org.apache.hadoop.utils.BackgroundTaskResult; import org.apache.hadoop.utils.BatchOperation; -import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter; +import org.apache.hadoop.utils.MetadataStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; -import java.util.List; import java.util.LinkedList; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.apache.hadoop.ozone.OzoneConfigKeys - .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER; -import static org.apache.hadoop.ozone.OzoneConfigKeys - .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER_DEFAULT; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER_DEFAULT; /** * A per-datanode container block deleting service takes in charge diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java similarity index 
100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java index e872555a36b..f7b49b75905 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java @@ -16,15 +16,15 @@ */ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.ozone.container.common.statemachine .SCMConnectionManager; import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java similarity index 98% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java index fee3e1c2a8b..40feca32bd0 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; import 
org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java index b54923e58e4..13d9f7295d9 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java @@ -17,7 +17,7 @@ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java similarity index 89% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java index e9f4b61ce88..ba6b4185df3 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java @@ -16,13 +16,17 @@ */ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; -import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine; -import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.ozone.container.common.statemachine + .EndpointStateMachine; +import org.apache.hadoop.ozone.container.common.statemachine + .SCMConnectionManager; import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java similarity index 90% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java index ff38cdccea7..f106e3d55fc 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java @@ -16,24 +16,31 @@ */ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSUtil; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto + .DeleteBlockTransactionResult; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.ozone.container.common.helpers.DeletedContainerBlocksSummary; +import org.apache.hadoop.ozone.container.common.helpers + .DeletedContainerBlocksSummary; import org.apache.hadoop.ozone.container.common.helpers.KeyUtils; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine; -import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; +import org.apache.hadoop.ozone.container.common.statemachine + .EndpointStateMachine; +import org.apache.hadoop.ozone.container.common.statemachine + .SCMConnectionManager; import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto.DeleteBlockTransactionResult; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BatchOperation; import org.apache.hadoop.utils.MetadataStore; diff --git 
a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java similarity index 91% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java index 08f47a26e56..ac245d511cf 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java @@ -18,15 +18,15 @@ package org.apache.hadoop.ozone.container.common.states.datanode; import com.google.common.base.Strings; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.HdslUtils; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.HddsUtils; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; -import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine; -import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; +import org.apache.hadoop.ozone.container.common.statemachine + .DatanodeStateMachine; +import org.apache.hadoop.ozone.container.common.statemachine + .SCMConnectionManager; import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.container.common.states.DatanodeState; - -import org.apache.hadoop.scm.ScmConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,7 +41,7 @@ import java.util.concurrent.Future; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.apache.hadoop.hdsl.HdslUtils.getSCMAddresses; +import static org.apache.hadoop.hdds.HddsUtils.getSCMAddresses; /** * Init Datanode State is the task that gets run when we are in Init State. @@ -106,7 +106,7 @@ public class InitDatanodeState implements DatanodeState, * Persist DatanodeDetails to datanode.id file. */ private void persistContainerDatanodeDetails() throws IOException { - String dataNodeIDPath = HdslUtils.getDatanodeIdFilePath(conf); + String dataNodeIDPath = HddsUtils.getDatanodeIdFilePath(conf); File idPath = new File(dataNodeIDPath); DatanodeDetails datanodeDetails = this.context.getParent() .getDatanodeDetails(); diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java index 29f1f9c2daf..5dee10f44b4 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java @@ -20,8 +20,12 @@ package org.apache.hadoop.ozone.container.common.states.endpoint; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; import org.apache.hadoop.ozone.container.common.helpers .DeletedContainerBlocksSummary; import org.apache.hadoop.ozone.container.common.statemachine @@ -32,10 +36,6 @@ 
import org.apache.hadoop.ozone.container.common.statemachine.StateContext; import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand; import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand; import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java index bfe6a2885ba..6913896e406 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java @@ -18,12 +18,11 @@ package org.apache.hadoop.ozone.container.common.states.endpoint; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; import org.apache.hadoop.ozone.container.common.statemachine .EndpointStateMachine; - -import org.apache.hadoop.scm.ScmConfigKeys; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java similarity index 91% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java index 502d82700b2..b048ee5b5ca 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java @@ -17,9 +17,11 @@ package org.apache.hadoop.ozone.container.common.states.endpoint; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; +import org.apache.hadoop.ozone.container.common.statemachine + .EndpointStateMachine; import org.apache.hadoop.ozone.protocol.VersionResponse; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; import java.io.IOException; import java.util.concurrent.Callable; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java similarity index 95% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java index bd180eff839..50e45b45bf6 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java @@ -27,10 +27,10 @@ import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.handler.logging.LogLevel; import io.netty.handler.logging.LoggingHandler; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -94,8 +94,8 @@ public final class XceiverServer implements XceiverServerSpi { * @return enum -- {Stand_Alone, Ratis, Chained} */ @Override - public HdslProtos.ReplicationType getServerType() { - return HdslProtos.ReplicationType.STAND_ALONE; + public HddsProtos.ReplicationType getServerType() { + return HddsProtos.ReplicationType.STAND_ALONE; } @Override diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java rename to 
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java index 6c42c84e12a..5947dde75db 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java @@ -20,11 +20,13 @@ package org.apache.hadoop.ozone.container.common.transport.server; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; -import org.slf4j.LoggerFactory; -import org.slf4j.Logger; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandResponseProto; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Netty server handlers that respond to Network events. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java index 036a654953f..78ba26b4de1 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java @@ -26,8 +26,9 @@ import io.netty.handler.codec.protobuf.ProtobufDecoder; import io.netty.handler.codec.protobuf.ProtobufEncoder; import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandRequestProto; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; /** * Creates a channel for the XceiverServer. 
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java similarity index 92% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java index 09bd6fcdab7..dad9e9f3fde 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.container.common.transport.server; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.IOException; @@ -38,6 +38,6 @@ public interface XceiverServerSpi { * Returns the Replication type supported by this end-point. * @return enum -- {Stand_Alone, Ratis, Chained} */ - HdslProtos.ReplicationType getServerType(); + HddsProtos.ReplicationType getServerType(); } diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java index 08a96149c6f..1a89e44bd18 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java @@ -19,33 +19,36 @@ package org.apache.hadoop.ozone.container.common.transport.server.ratis; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.WriteChunkRequestProto; +import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + 
.ContainerCommandResponseProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos + .WriteChunkRequestProto; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; import org.apache.ratis.conf.RaftProperties; import org.apache.ratis.protocol.Message; import org.apache.ratis.protocol.RaftClientRequest; import org.apache.ratis.protocol.RaftPeerId; import org.apache.ratis.server.storage.RaftStorage; +import org.apache.ratis.shaded.com.google.protobuf.ByteString; import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil; import org.apache.ratis.shaded.proto.RaftProtos.LogEntryProto; import org.apache.ratis.shaded.proto.RaftProtos.SMLogEntryProto; -import org.apache.ratis.statemachine.impl.BaseStateMachine; -import org.apache.ratis.statemachine.impl.SimpleStateMachineStorage; import org.apache.ratis.statemachine.StateMachineStorage; import org.apache.ratis.statemachine.TransactionContext; +import org.apache.ratis.statemachine.impl.BaseStateMachine; +import org.apache.ratis.statemachine.impl.SimpleStateMachineStorage; import org.apache.ratis.statemachine.impl.TransactionContextImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.ratis.shaded.com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ThreadPoolExecutor; /** A {@link org.apache.ratis.statemachine.StateMachine} for containers. * @@ -54,7 +57,7 @@ import java.util.concurrent.ConcurrentHashMap; * requests. * * Read only requests are classified in - * {@link org.apache.hadoop.scm.XceiverClientRatis#isReadOnly} + * {@link org.apache.hadoop.hdds.scm.XceiverClientRatis#isReadOnly} * and these readonly requests are replied from the {@link #query(Message)}. 
* * The write requests can be divided into requests with user data diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java index 7aee5bb4041..4bd55f1b99f 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java @@ -21,18 +21,17 @@ package org.apache.hadoop.ozone.container.common.transport.server.ratis; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; import org.apache.hadoop.ozone.container.common.transport.server .XceiverServerSpi; - -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; import org.apache.ratis.RaftConfigKeys; +import org.apache.ratis.RatisHelper; import org.apache.ratis.conf.RaftProperties; import org.apache.ratis.grpc.GrpcConfigKeys; import org.apache.ratis.netty.NettyConfigKeys; -import org.apache.ratis.RatisHelper; import org.apache.ratis.rpc.RpcType; import org.apache.ratis.rpc.SupportedRpcType; import org.apache.ratis.server.RaftServer; @@ -209,7 +208,7 @@ public final class XceiverServerRatis implements XceiverServerSpi { * @return enum -- {Stand_Alone, Ratis, Chained} */ @Override - public HdslProtos.ReplicationType getServerType() { - return HdslProtos.ReplicationType.RATIS; + public HddsProtos.ReplicationType getServerType() { + return HddsProtos.ReplicationType.RATIS; } } diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java similarity index 90% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java index 0ef9406f452..33a5971bbb3 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java @@ -20,7 +20,14 @@ package org.apache.hadoop.ozone.container.ozoneimpl; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.datanode.StorageLocation; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.helpers.ContainerData; import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl; @@ -31,15 +38,13 @@ import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager; import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; import org.apache.hadoop.ozone.container.common.interfaces.KeyManager; -import org.apache.hadoop.ozone.container.common.statemachine.background.BlockDeletingService; +import org.apache.hadoop.ozone.container.common.statemachine.background + .BlockDeletingService; import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer; -import org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis; - -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi; +import org.apache.hadoop.ozone.container.common.transport.server + .XceiverServerSpi; +import org.apache.hadoop.ozone.container.common.transport.server.ratis + .XceiverServerRatis; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,14 +55,16 @@ import java.util.List; import java.util.concurrent.TimeUnit; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; -import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_SERVICE_INTERVAL; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT; +import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX; import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT; /** * Ozone main class sets up the network server and initializes the container @@ -209,7 +216,7 @@ public class OzoneContainer { return this.manager.getNodeReport(); } - private int getPortbyType(HdslProtos.ReplicationType replicationType) { + private int getPortbyType(HddsProtos.ReplicationType replicationType) { for (XceiverServerSpi serverinstance : server) { if (serverinstance.getServerType() == replicationType) { return serverinstance.getIPCPort(); @@ -224,7 +231,7 @@ public class OzoneContainer { * @return Container server IPC port. */ public int getContainerServerPort() { - return getPortbyType(HdslProtos.ReplicationType.STAND_ALONE); + return getPortbyType(HddsProtos.ReplicationType.STAND_ALONE); } /** @@ -233,7 +240,7 @@ public class OzoneContainer { * @return Ratis port. */ public int getRatisContainerServerPort() { - return getPortbyType(HdslProtos.ReplicationType.RATIS); + return getPortbyType(HddsProtos.ReplicationType.RATIS); } /** diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java similarity index 68% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java index fbb84265571..43e7412aeae 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java @@ -17,17 +17,29 @@ package 
org.apache.hadoop.ozone.protocol; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; + import java.io.IOException; /** diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java similarity index 92% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java index fffbfd1d4fc..1fc7c570ef6 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java @@ -18,14 +18,14 @@ package org.apache.hadoop.ozone.protocol; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import 
org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; +import org.apache.hadoop.ozone.protocol.commands.SCMCommand; import java.util.List; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java index c2dc2b3bcb7..83acf5bd6e3 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java @@ -19,8 +19,8 @@ package org.apache.hadoop.ozone.protocol; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; import java.util.HashMap; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java index f9571e9bfc5..b1cdbc49136 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java @@ -18,11 +18,12 @@ package org.apache.hadoop.ozone.protocol.commands; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCmdType; -import static org.apache.hadoop.hdsl.protocol.proto + +import static org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCmdType.closeContainerCommand; /** diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java similarity index 84% rename from 
hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java index c5816581d41..a11ca25a304 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java @@ -17,9 +17,12 @@ */ package org.apache.hadoop.ozone.protocol.commands; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMDeleteBlocksCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMDeleteBlocksCmdResponseProto; import java.util.List; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java similarity index 97% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java index c4db6f7ef97..88b59110f47 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java @@ -18,13 +18,13 @@ package org.apache.hadoop.ozone.protocol.commands; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto .ErrorCode; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; /** * Response to Datanode Register call. 
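Illustrative aside for readers following the rename outside this patch: any code that touches these command protos has to move to the new package exactly as the command classes above do. A minimal, hypothetical sketch — only SCMCmdType and its closeContainerCommand value are taken from the hunks above; the helper class itself is not part of the patch:

import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.SCMCmdType;

/**
 * Illustrative helper (not in this patch): shows the post-rename import path.
 */
public final class CommandTypeCheck {
  private CommandTypeCheck() {
  }

  /** True when SCM asked the datanode to close a container. */
  public static boolean isCloseContainer(SCMCmdType type) {
    return type == SCMCmdType.closeContainerCommand;
  }
}
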
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java similarity index 91% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java index 0c55a90ca76..c167d59ddc1 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.ozone.protocol.commands; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCmdType; -import static org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto; -import static org.apache.hadoop.hdsl.protocol.proto + +import static org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCmdType.reregisterCommand; +import static org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto; /** * Informs a datanode to register itself with SCM again. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java index e79a157a9c1..73e4194d8cf 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.ozone.protocol.commands; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; import com.google.protobuf.GeneratedMessage; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; /** * A class that acts as the base class to convert between Java and SCM diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java similarity index 96% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java index fa2245b4adb..84317526e7a 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java @@ -18,10 +18,10 @@ package org.apache.hadoop.ozone.protocol.commands; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SendContainerReportProto; -import 
org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SendContainerReportProto; /** * Allows a Datanode to send in the container report. diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java similarity index 93% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java index 03854a05939..a718fa7476f 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java @@ -19,5 +19,5 @@ package org.apache.hadoop.ozone.protocol; /** - * This package contains classes for HDSL protocol definitions. + * This package contains classes for HDDS protocol definitions. */ diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java similarity index 85% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java index 4abd8a6596f..12fed1cd0f2 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java @@ -18,29 +18,36 @@ package org.apache.hadoop.ozone.protocolPB; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + 
.StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMRegisterRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisterRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; import java.io.Closeable; import java.io.IOException; diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java similarity index 90% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java index 677a364afef..9b28b5ae98d 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java @@ -16,8 +16,10 @@ */ package org.apache.hadoop.ozone.protocolPB; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .StorageContainerDatanodeProtocolService; import org.apache.hadoop.ipc.ProtocolInfo; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.StorageContainerDatanodeProtocolService; /** * 
Protocol used from a datanode to StorageContainerManager. This extends diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java similarity index 81% rename from hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java rename to hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java index cd2fb59c224..985b75acd67 100644 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java @@ -18,14 +18,22 @@ package org.apache.hadoop.ozone.protocolPB; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; import java.io.IOException; diff --git a/hadoop-hdsl/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto b/hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto similarity index 97% rename from hadoop-hdsl/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto rename to hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto index 187ecda25ca..cd415e2bca7 100644 --- a/hadoop-hdsl/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto +++ b/hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto @@ -22,7 +22,7 @@ * for what changes are allowed for a *unstable* .proto interface. 
*/ -option java_package = "org.apache.hadoop.hdsl.protocol.proto"; +option java_package = "org.apache.hadoop.hdds.protocol.proto"; option java_outer_classname = "StorageContainerDatanodeProtocolProtos"; @@ -30,9 +30,9 @@ option java_generic_services = true; option java_generate_equals_and_hash = true; -package hadoop.hdsl; +package hadoop.hdds; -import "hdsl.proto"; +import "hdds.proto"; /** @@ -71,7 +71,7 @@ all container info in memory all the time. */ message ContainerPersistanceProto { required DatanodeContainerState state = 1; - required hadoop.hdsl.Pipeline pipeline = 2; + required hadoop.hdds.Pipeline pipeline = 2; required ContainerInfo info = 3; } @@ -98,7 +98,7 @@ message ContainerInfo { optional int64 readBytes = 8; optional int64 writeBytes = 9; required int64 containerID = 10; - optional hadoop.hdsl.LifeCycleState state = 11; + optional hadoop.hdds.LifeCycleState state = 11; } // The deleted blocks which are stored in deletedBlock.db of scm. @@ -161,7 +161,7 @@ message SCMVersionRequestProto { */ message SCMVersionResponseProto { required uint32 softwareVersion = 1; - repeated hadoop.hdsl.KeyValue keys = 2; + repeated hadoop.hdds.KeyValue keys = 2; } message SCMNodeAddressList { diff --git a/hadoop-hdsl/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider b/hadoop-hdds/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider similarity index 100% rename from hadoop-hdsl/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider rename to hadoop-hdds/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java similarity index 100% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java rename to hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java index 4501940a1b5..9db792bc7b6 100644 --- a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java @@ -17,8 +17,6 @@ package org.apache.hadoop.ozone.container.common; -import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.ipc.ProtobufRpcEngine; @@ -31,6 +29,8 @@ import org.apache.hadoop.ozone.protocolPB import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB; import org.apache.hadoop.security.UserGroupInformation; +import java.net.InetSocketAddress; + /** * Helper utility to test containers. 
*/ diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java similarity index 97% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java rename to hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java index 32c28aa12df..b63c5fbe9bf 100644 --- a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java @@ -16,15 +16,11 @@ */ package org.apache.hadoop.ozone.container.common; -import java.io.IOException; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.ServerSocket; - +import com.google.protobuf.BlockingService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos .StorageContainerDatanodeProtocolService; import org.apache.hadoop.ipc.ProtobufRpcEngine; @@ -34,7 +30,10 @@ import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB; import org.apache.hadoop.ozone.protocolPB .StorageContainerDatanodeProtocolServerSideTranslatorPB; -import com.google.protobuf.BlockingService; +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.ServerSocket; /** * Test Endpoint class. 
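The proto hunk above moves StorageContainerDatanodeProtocol.proto to java_package org.apache.hadoop.hdds.protocol.proto and to the hadoop.hdds proto package, so the generated Java classes move with it. A hedged sketch of building the version response against the regenerated classes, using only the fields the diff shows (softwareVersion and repeated hadoop.hdds.KeyValue keys); the standard protobuf-generated builders are assumed, and the key/value accessors on HddsProtos.KeyValue plus the sample values are assumptions, not taken from this patch:

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;

public final class VersionResponseExample {
  private VersionResponseExample() {
  }

  // Builds the message declared in StorageContainerDatanodeProtocol.proto:
  //   required uint32 softwareVersion = 1;
  //   repeated hadoop.hdds.KeyValue keys = 2;
  public static SCMVersionResponseProto sample() {
    return SCMVersionResponseProto.newBuilder()
        .setSoftwareVersion(1)
        .addKeys(HddsProtos.KeyValue.newBuilder()
            .setKey("scmUuid")                                  // assumed key name
            .setValue("00000000-0000-0000-0000-000000000000")   // assumed value
            .build())
        .build();
  }
}
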
diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java similarity index 88% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java rename to hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java index 4349b1aa25f..41a8a8012c9 100644 --- a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java @@ -17,19 +17,28 @@ package org.apache.hadoop.ozone.container.common; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.scm.VersionInfo; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerInfo; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol; import org.apache.hadoop.ozone.protocol.VersionResponse; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; -import org.apache.hadoop.ozone.scm.VersionInfo; import java.io.IOException; import java.util.HashMap; diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java similarity index 95% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java rename to 
hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java index f05ba4941a4..9446ce275ac 100644 --- a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java @@ -19,18 +19,24 @@ package org.apache.hadoop.ozone.container.common; import com.google.common.collect.Maps; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine; -import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine; -import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager; +import org.apache.hadoop.ozone.container.common.statemachine + .DatanodeStateMachine; +import org.apache.hadoop.ozone.container.common.statemachine + .EndpointStateMachine; +import org.apache.hadoop.ozone.container.common.statemachine + .SCMConnectionManager; import org.apache.hadoop.ozone.container.common.states.DatanodeState; -import org.apache.hadoop.ozone.container.common.states.datanode.InitDatanodeState; -import org.apache.hadoop.ozone.container.common.states.datanode.RunningDatanodeState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.ozone.container.common.states.datanode + .InitDatanodeState; +import org.apache.hadoop.ozone.container.common.states.datanode + .RunningDatanodeState; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.junit.After; @@ -53,9 +59,9 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; import static org.junit.Assert.assertTrue; /** diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java similarity index 98% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java rename to hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java index 61bd65f0d9d..86888aa790c 100644 --- a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java +++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java @@ -20,11 
+20,12 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.ozone.container.common.statemachine.background.BlockDeletingService; +import org.apache.hadoop.ozone.container.common.statemachine.background + .BlockDeletingService; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; /** diff --git a/hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java similarity index 100% rename from hadoop-hdsl/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java rename to hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java diff --git a/hadoop-hdsl/framework/README.md b/hadoop-hdds/framework/README.md similarity index 95% rename from hadoop-hdsl/framework/README.md rename to hadoop-hdds/framework/README.md index 1f4d217556f..59cdac71f39 100644 --- a/hadoop-hdsl/framework/README.md +++ b/hadoop-hdds/framework/README.md @@ -15,9 +15,9 @@ limitations under the License. --> -# Server framework for HDSL/Ozone +# Server framework for HDDS/Ozone -This project contains generic utilities and resources for all the HDSL/Ozone +This project contains generic utilities and resources for all the HDDS/Ozone server-side components. The project is shared between the server/service projects but not with the diff --git a/hadoop-hdsl/framework/pom.xml b/hadoop-hdds/framework/pom.xml similarity index 89% rename from hadoop-hdsl/framework/pom.xml rename to hadoop-hdds/framework/pom.xml index 44ef7fedf6c..a234b8e32d4 100644 --- a/hadoop-hdsl/framework/pom.xml +++ b/hadoop-hdds/framework/pom.xml @@ -19,26 +19,24 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-server-framework + hadoop-hdds-server-framework 3.2.0-SNAPSHOT - Apache Hadoop HDSL Common utilities for server side - components - - Apache Hadoop HDSL Server Common + Apache HDDS server framework + Apache HDDS Server Common jar - hdsl + hdds true org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided diff --git a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java similarity index 97% rename from hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java index f9c3991886a..90de00273c9 100644 --- a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/BaseHttpServer.java @@ -15,7 +15,7 @@ * the License. 
*/ -package org.apache.hadoop.hdsl.server; +package org.apache.hadoop.hdds.server; import com.google.common.base.Optional; import org.apache.hadoop.conf.Configuration; @@ -32,8 +32,8 @@ import javax.servlet.http.HttpServlet; import java.io.IOException; import java.net.InetSocketAddress; -import static org.apache.hadoop.hdsl.HdslUtils.getHostNameFromConfigKeys; -import static org.apache.hadoop.hdsl.HdslUtils.getPortNumberFromConfigKeys; +import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys; +import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys; /** * Base class for HTTP server of the Ozone related components. diff --git a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java similarity index 97% rename from hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java index f315ecb11e9..2cf5b351a9a 100644 --- a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServerUtils.java @@ -15,23 +15,22 @@ * the License. */ -package org.apache.hadoop.hdsl.server; - -import java.io.File; -import java.net.InetSocketAddress; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.ozone.OzoneConfigKeys; +package org.apache.hadoop.hdds.server; import com.google.common.base.Preconditions; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.http.client.methods.HttpRequestBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.File; +import java.net.InetSocketAddress; + /** - * Generic utilities for all HDSL/Ozone servers. + * Generic utilities for all HDDS/Ozone servers. */ public class ServerUtils { diff --git a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java similarity index 98% rename from hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java index f9c57ea72db..bcd75f3f215 100644 --- a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfo.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.hadoop.hdsl.server; +package org.apache.hadoop.hdds.server; /** * Common runtime information for any service components. 
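BaseHttpServer above now resolves its bind address through the renamed org.apache.hadoop.hdds.HddsUtils helpers instead of HdslUtils. A small sketch of the same pattern; the getHostNameFromConfigKeys and getPortNumberFromConfigKeys names come from the static imports in that hunk, while their Guava Optional return types are inferred from BaseHttpServer's com.google.common.base.Optional import and should be treated as an assumption:

import com.google.common.base.Optional;
import org.apache.hadoop.conf.Configuration;

import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;

public final class HttpAddressExample {
  private HttpAddressExample() {
  }

  // Resolves "host:port" for an HTTP endpoint from a single address key,
  // falling back to the supplied defaults when the key is not configured.
  public static String resolve(Configuration conf, String addressKey,
      String defaultHost, int defaultPort) {
    Optional<String> host = getHostNameFromConfigKeys(conf, addressKey);
    Optional<Integer> port = getPortNumberFromConfigKeys(conf, addressKey);
    return host.or(defaultHost) + ":" + port.or(defaultPort);
  }
}
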
diff --git a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java
similarity index 97%
rename from hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java
index 92f00b1c5b6..36d6b64a688 100644
--- a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/ServiceRuntimeInfoImpl.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdsl.server;
+package org.apache.hadoop.hdds.server;
 import org.apache.hadoop.util.VersionInfo;
diff --git a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java
similarity index 88%
rename from hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java
rename to hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java
index b38215ccbfb..35ad5e7f496 100644
--- a/hadoop-hdsl/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdsl/server/package-info.java
@@ -16,8 +16,8 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdsl.server;
+package org.apache.hadoop.hdds.server;
 /**
- * Common server side utilities for all the hdsl/ozone server components.
+ * Common server side utilities for all the hdds/ozone server components.
  */
\ No newline at end of file
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/datanode/dn.js b/hadoop-hdds/framework/src/main/resources/webapps/datanode/dn.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/datanode/dn.js
rename to hadoop-hdds/framework/src/main/resources/webapps/datanode/dn.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/angular-1.6.4.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-1.6.4.min.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/angular-1.6.4.min.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/angular-1.6.4.min.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/d3-3.5.17.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/d3-3.5.17.min.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/d3-3.5.17.min.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/d3-3.5.17.min.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/dfs-dust.js b/hadoop-hdds/framework/src/main/resources/webapps/static/dfs-dust.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/dfs-dust.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/dfs-dust.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css
rename to hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map
rename to hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map
rename to hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/ozone.css b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.css
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/ozone.css
rename to hadoop-hdds/framework/src/main/resources/webapps/static/ozone.css
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/ozone.js b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.js
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/ozone.js
rename to hadoop-hdds/framework/src/main/resources/webapps/static/ozone.js
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/templates/config.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/config.html
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/templates/config.html
rename to hadoop-hdds/framework/src/main/resources/webapps/static/templates/config.html
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/templates/jvm.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/jvm.html
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/templates/jvm.html
rename to hadoop-hdds/framework/src/main/resources/webapps/static/templates/jvm.html
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/templates/menu.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/menu.html
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/templates/menu.html
rename to hadoop-hdds/framework/src/main/resources/webapps/static/templates/menu.html
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/templates/overview.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/overview.html
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/templates/overview.html
rename to hadoop-hdds/framework/src/main/resources/webapps/static/templates/overview.html
diff --git a/hadoop-hdsl/framework/src/main/resources/webapps/static/templates/rpc-metrics.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/rpc-metrics.html
similarity index 100%
rename from hadoop-hdsl/framework/src/main/resources/webapps/static/templates/rpc-metrics.html
rename to hadoop-hdds/framework/src/main/resources/webapps/static/templates/rpc-metrics.html
diff --git a/hadoop-hdsl/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java
similarity index 96%
rename from hadoop-hdsl/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java
rename to hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java
index fcd5f7fbf75..c6eae0e5fa6 100644
--- a/hadoop-hdsl/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdsl/server/TestBaseHttpServer.java
@@ -15,15 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdsl.server;
+package org.apache.hadoop.hdds.server;
 import org.apache.hadoop.conf.Configuration;
-
 import org.junit.Assert;
 import org.junit.Test;
 /**
- * Test Common ozone/hdsl web methods.
+ * Test Common ozone/hdds web methods.
*/ public class TestBaseHttpServer { @Test diff --git a/hadoop-hdsl/framework/src/test/resources/ozone-site.xml b/hadoop-hdds/framework/src/test/resources/ozone-site.xml similarity index 100% rename from hadoop-hdsl/framework/src/test/resources/ozone-site.xml rename to hadoop-hdds/framework/src/test/resources/ozone-site.xml diff --git a/hadoop-hdsl/pom.xml b/hadoop-hdds/pom.xml similarity index 95% rename from hadoop-hdsl/pom.xml rename to hadoop-hdds/pom.xml index 0a40fbb2622..01b696cec03 100644 --- a/hadoop-hdsl/pom.xml +++ b/hadoop-hdds/pom.xml @@ -23,10 +23,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 3.2.0-SNAPSHOT ../hadoop-project-dist - hadoop-hdsl + + hadoop-hdds 3.2.0-SNAPSHOT - Apache Hadoop Hdsl Parent project - Apache Hadoop Hdsl + Apache Hadoop Distributed Data Store Parent project + Apache Hdds pom diff --git a/hadoop-hdsl/server-scm/pom.xml b/hadoop-hdds/server-scm/pom.xml similarity index 88% rename from hadoop-hdsl/server-scm/pom.xml rename to hadoop-hdds/server-scm/pom.xml index e7b8bf0e222..35975f4e492 100644 --- a/hadoop-hdsl/server-scm/pom.xml +++ b/hadoop-hdds/server-scm/pom.xml @@ -19,48 +19,48 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-server-scm + hadoop-hdds-server-scm 3.2.0-SNAPSHOT - Apache Hadoop HDSL SCM server - Apache Hadoop HDSL SCM server + Apache HDDS SCM server + Apache Hadoop HDDS SCM server jar - hdsl + hdds true org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service provided org.apache.hadoop - hadoop-hdsl-client + hadoop-hdds-client provided org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework provided org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service test test-jar @@ -141,7 +141,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework ${project.build.directory}/ webapps/static/**/*.* diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMMXBean.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java index 3b31d9f70c4..17b681443b1 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMMXBean.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java @@ -16,12 +16,12 @@ * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm; - -import java.util.Map; +package org.apache.hadoop.hdds.scm; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.server.ServiceRuntimeInfo; +import org.apache.hadoop.hdds.server.ServiceRuntimeInfo; + +import java.util.Map; /** * diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMStorage.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java similarity index 90% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMStorage.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java index 4a6d4c06a26..27e93632337 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/SCMStorage.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java @@ -15,19 +15,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; + +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType; +import org.apache.hadoop.ozone.common.Storage; import java.io.IOException; import java.util.Properties; import java.util.UUID; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.common.Storage; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType; - +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; import static org.apache.hadoop.ozone.OzoneConsts.SCM_ID; import static org.apache.hadoop.ozone.OzoneConsts.STORAGE_DIR; -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; /** * SCMStorage is responsible for management of the StorageDirectories used by diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java similarity index 87% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java index ef925ca849a..1a78deef41a 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java @@ -15,7 +15,7 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; @@ -26,18 +26,79 @@ import com.google.common.cache.RemovalNotification; import com.google.protobuf.BlockingService; import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.HddsUtils; +import org.apache.hadoop.hdds.scm.block.BlockManager; +import org.apache.hadoop.hdds.scm.block.BlockManagerImpl; +import org.apache.hadoop.hdds.scm.container.ContainerMapping; +import org.apache.hadoop.hdds.scm.container.Mapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMMetrics; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.SCMNodeManager; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.HdslUtils; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto + .DeleteBlockTransactionResult; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos + .ContainerBlocksDeletionACKResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeAddressList; +import 
org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SendContainerReportProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; +import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.hdsl.server.ServiceRuntimeInfoImpl; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.ozone.OzoneConfigKeys; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED; import org.apache.hadoop.ozone.common.BlockGroup; import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; import org.apache.hadoop.ozone.common.Storage.StorageState; @@ -47,64 +108,16 @@ import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand; import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand; import org.apache.hadoop.ozone.protocol.commands.RegisteredCommand; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeAddressList; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SendContainerReportProto; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto.DeleteBlockTransactionResult; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto; -import org.apache.hadoop.ozone.protocolPB.ScmBlockLocationProtocolServerSideTranslatorPB; +import org.apache.hadoop.ozone.protocolPB + .ScmBlockLocationProtocolServerSideTranslatorPB; import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB; -import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolServerSideTranslatorPB; -import org.apache.hadoop.ozone.protocolPB.StorageContainerLocationProtocolServerSideTranslatorPB; -import org.apache.hadoop.ozone.scm.block.BlockManager; -import org.apache.hadoop.ozone.scm.block.BlockManagerImpl; -import org.apache.hadoop.ozone.scm.container.ContainerMapping; -import org.apache.hadoop.ozone.scm.container.Mapping; -import org.apache.hadoop.ozone.scm.container.placement.metrics.ContainerStat; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMMetrics; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.SCMNodeManager; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.DeleteBlockResult; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; -import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol; -import org.apache.hadoop.scm.protocolPB.ScmBlockLocationProtocolPB; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; -import org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes; +import org.apache.hadoop.ozone.protocolPB + .StorageContainerDatanodeProtocolServerSideTranslatorPB; +import org.apache.hadoop.ozone.protocolPB + .StorageContainerLocationProtocolServerSideTranslatorPB; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.StringUtils; - -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmBlockClientBindAddress; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmClientBindAddress; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmDataNodeBindAddress; -import static org.apache.hadoop.hdsl.server.ServerUtils - .updateRPCListenAddress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -127,19 +140,24 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static org.apache.hadoop.hdsl.protocol.proto - .ScmBlockLocationProtocolProtos.DeleteScmBlockResult.Result; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY; -import 
static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CLIENT_ADDRESS_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_DATANODE_ADDRESS_KEY; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_DB_CACHE_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_MB; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_HANDLER_COUNT_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_HANDLER_COUNT_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HANDLER_COUNT_KEY; +import static org.apache.hadoop.hdds.protocol.proto + .ScmBlockLocationProtocolProtos.DeleteScmBlockResult.Result; +import static org.apache.hadoop.hdds.server.ServerUtils.updateRPCListenAddress; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED; import static org.apache.hadoop.util.ExitUtil.terminate; /** @@ -281,7 +299,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl new StorageContainerDatanodeProtocolServerSideTranslatorPB(this)); final InetSocketAddress datanodeRpcAddr = - getScmDataNodeBindAddress(conf); + HddsServerUtil.getScmDataNodeBindAddress(conf); datanodeRpcServer = startRpcServer(conf, datanodeRpcAddr, StorageContainerDatanodeProtocolPB.class, dnProtoPbService, handlerCount); @@ -296,7 +314,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl new StorageContainerLocationProtocolServerSideTranslatorPB(this)); final InetSocketAddress scmAddress = - getScmClientBindAddress(conf); + HddsServerUtil.getScmClientBindAddress(conf); clientRpcServer = startRpcServer(conf, scmAddress, StorageContainerLocationProtocolPB.class, storageProtoPbService, handlerCount); @@ -311,7 +329,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl new ScmBlockLocationProtocolServerSideTranslatorPB(this)); final InetSocketAddress scmBlockAddress = - getScmBlockClientBindAddress(conf); + HddsServerUtil.getScmBlockClientBindAddress(conf); blockRpcServer = startRpcServer(conf, scmBlockAddress, ScmBlockLocationProtocolPB.class, blockProtoPbService, handlerCount); @@ -447,7 +465,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl public static StorageContainerManager createSCM(String[] argv, OzoneConfiguration conf) throws IOException { - if (!HdslUtils.isHdslEnabled(conf)) { + if (!HddsUtils.isHddsEnabled(conf)) { System.err.println("SCM cannot be started in secure mode or when " + OZONE_ENABLED + " is set to false"); System.exit(1); @@ -659,19 +677,19 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl * @param poolName @return List of Datanodes. 
*/ @Override - public HdslProtos.NodePool queryNode(EnumSet nodeStatuses, - HdslProtos.QueryScope queryScope, String poolName) throws IOException { + public HddsProtos.NodePool queryNode(EnumSet nodeStatuses, + HddsProtos.QueryScope queryScope, String poolName) throws IOException { - if (queryScope == HdslProtos.QueryScope.POOL) { + if (queryScope == HddsProtos.QueryScope.POOL) { throw new IllegalArgumentException("Not Supported yet"); } List datanodes = queryNode(nodeStatuses); - HdslProtos.NodePool.Builder poolBuilder = - HdslProtos.NodePool.newBuilder(); + HddsProtos.NodePool.Builder poolBuilder = + HddsProtos.NodePool.newBuilder(); for (DatanodeDetails datanode : datanodes) { - HdslProtos.Node node = HdslProtos.Node.newBuilder() + HddsProtos.Node node = HddsProtos.Node.newBuilder() .setNodeID(datanode.getProtoBufMessage()) .addAllNodeStates(nodeStatuses) .build(); @@ -701,18 +719,18 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl if (op == ObjectStageChangeRequestProto.Op.create) { if (stage == ObjectStageChangeRequestProto.Stage.begin) { scmContainerManager.updateContainerState(name, - HdslProtos.LifeCycleEvent.CREATE); + HddsProtos.LifeCycleEvent.CREATE); } else { scmContainerManager.updateContainerState(name, - HdslProtos.LifeCycleEvent.CREATED); + HddsProtos.LifeCycleEvent.CREATED); } } else if (op == ObjectStageChangeRequestProto.Op.close) { if (stage == ObjectStageChangeRequestProto.Stage.begin) { scmContainerManager.updateContainerState(name, - HdslProtos.LifeCycleEvent.FINALIZE); + HddsProtos.LifeCycleEvent.FINALIZE); } else { scmContainerManager.updateContainerState(name, - HdslProtos.LifeCycleEvent.CLOSE); + HddsProtos.LifeCycleEvent.CLOSE); } } } //else if (type == ObjectStageChangeRequestProto.Type.pipeline) { @@ -725,9 +743,9 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl */ @Override public Pipeline createReplicationPipeline( - HdslProtos.ReplicationType replicationType, - HdslProtos.ReplicationFactor factor, - HdslProtos.NodePool nodePool) + HddsProtos.ReplicationType replicationType, + HddsProtos.ReplicationFactor factor, + HddsProtos.NodePool nodePool) throws IOException { // TODO: will be addressed in future patch. 
return null; @@ -803,8 +821,8 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl * @throws IOException */ @Override - public Pipeline allocateContainer(HdslProtos.ReplicationType replicationType, - HdslProtos.ReplicationFactor replicationFactor, String containerName, + public Pipeline allocateContainer(HddsProtos.ReplicationType replicationType, + HddsProtos.ReplicationFactor replicationFactor, String containerName, String owner) throws IOException { checkAdminAccess(); @@ -1129,7 +1147,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl */ @Override public AllocatedBlock allocateBlock(long size, - HdslProtos.ReplicationType type, HdslProtos.ReplicationFactor factor, + HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor, String owner) throws IOException { return scmBlockManager.allocateBlock(size, type, factor, owner); } diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManagerHttpServer.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java index 0dcf364f3d2..1ca059ca4c9 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/StorageContainerManagerHttpServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java @@ -15,12 +15,11 @@ * the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.server.BaseHttpServer; +import org.apache.hadoop.hdds.server.BaseHttpServer; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.scm.ScmConfigKeys; import java.io.IOException; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java similarity index 87% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java index 6d964f0f99d..4ab251641c5 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java @@ -15,11 +15,11 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import java.io.Closeable; import java.io.IOException; @@ -39,8 +39,8 @@ public interface BlockManager extends Closeable { * @return AllocatedBlock * @throws IOException */ - AllocatedBlock allocateBlock(long size, HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor, String owner) throws IOException; + AllocatedBlock allocateBlock(long size, HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor, String owner) throws IOException; /** * Give the key to the block, get the pipeline info. diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManagerImpl.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java index 36f73cc6154..d9661124f57 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockManagerImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java @@ -14,29 +14,27 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.Mapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; import org.apache.hadoop.hdfs.DFSUtil; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.ozone.scm.container.Mapping; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.utils.BatchOperation; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; - -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,6 +51,13 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .CHILL_MODE_EXCEPTION; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .FAILED_TO_FIND_BLOCK; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .INVALID_BLOCK_SIZE; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_SERVICE_INTERVAL; import static org.apache.hadoop.ozone.OzoneConfigKeys @@ -62,12 +67,6 @@ import static org.apache.hadoop.ozone.OzoneConfigKeys import static org.apache.hadoop.ozone.OzoneConfigKeys .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT; import static org.apache.hadoop.ozone.OzoneConsts.BLOCK_DB; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes - .CHILL_MODE_EXCEPTION; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes - .FAILED_TO_FIND_BLOCK; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes - .INVALID_BLOCK_SIZE; /** Block Manager manages the block access for SCM. 
*/ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { @@ -265,12 +264,12 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { containerManager .getStateManager() .getMatchingContainer( - size, owner, type, factor, HdslProtos.LifeCycleState + size, owner, type, factor, HddsProtos.LifeCycleState .ALLOCATED); if (containerInfo != null) { containerManager.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATE); - return newBlock(containerInfo, HdslProtos.LifeCycleState.ALLOCATED); + HddsProtos.LifeCycleEvent.CREATE); + return newBlock(containerInfo, HddsProtos.LifeCycleState.ALLOCATED); } // Since we found no allocated containers that match our criteria, let us @@ -278,10 +277,10 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { containerInfo = containerManager .getStateManager() - .getMatchingContainer(size, owner, type, factor, HdslProtos + .getMatchingContainer(size, owner, type, factor, HddsProtos .LifeCycleState.OPEN); if (containerInfo != null) { - return newBlock(containerInfo, HdslProtos.LifeCycleState.OPEN); + return newBlock(containerInfo, HddsProtos.LifeCycleState.OPEN); } // We found neither ALLOCATED or OPEN Containers. This generally means @@ -295,12 +294,12 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { containerManager .getStateManager() .getMatchingContainer( - size, owner, type, factor, HdslProtos.LifeCycleState + size, owner, type, factor, HddsProtos.LifeCycleState .ALLOCATED); if (containerInfo != null) { containerManager.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATE); - return newBlock(containerInfo, HdslProtos.LifeCycleState.ALLOCATED); + HddsProtos.LifeCycleEvent.CREATE); + return newBlock(containerInfo, HddsProtos.LifeCycleState.ALLOCATED); } // we have tried all strategies we know and but somehow we are not able @@ -325,12 +324,12 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { * @return AllocatedBlock */ private AllocatedBlock newBlock( - ContainerInfo containerInfo, HdslProtos.LifeCycleState state) + ContainerInfo containerInfo, HddsProtos.LifeCycleState state) throws IOException { // TODO : Replace this with Block ID. String blockKey = UUID.randomUUID().toString(); - boolean createContainer = (state == HdslProtos.LifeCycleState.ALLOCATED); + boolean createContainer = (state == HddsProtos.LifeCycleState.ALLOCATED); AllocatedBlock.Builder abb = new AllocatedBlock.Builder() @@ -521,7 +520,7 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean { // sense. // We have to get open containers by Replication Type and Replication // factor. Hence returning 0 for now. 
- // containers.get(HdslProtos.LifeCycleState.OPEN).size(); + // containers.get(HddsProtos.LifeCycleState.OPEN).size(); } @Override diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockmanagerMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockmanagerMXBean.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java index efcfc637341..23c6983083e 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/BlockmanagerMXBean.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java @@ -15,7 +15,7 @@ * the License. */ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; /** diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DatanodeDeletedBlockTransactions.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DatanodeDeletedBlockTransactions.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java index 9a208d5824b..47074d28ecb 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DatanodeDeletedBlockTransactions.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java @@ -14,7 +14,14 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; + +import com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hdds.scm.container.Mapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import java.io.IOException; import java.util.Collections; @@ -23,13 +30,6 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.ozone.scm.container.Mapping; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; - -import com.google.common.collect.ArrayListMultimap; - /** * A wrapper class to hold info about datanode and all deleted block * transactions that will be sent to this datanode. 
diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLog.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLog.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java index a34d9605b83..f7b770eca0e 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLog.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java @@ -15,9 +15,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import java.io.Closeable; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLogImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLogImpl.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java index 77c59b04c4c..0f4988afa33 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/DeletedBlockLogImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java @@ -15,21 +15,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSUtil; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.utils.BatchOperation; import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; - -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,11 +42,16 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_BLOCK_DELETION_MAX_RETRY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_BLOCK_DELETION_MAX_RETRY_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_MB; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; import static org.apache.hadoop.ozone.OzoneConsts.DELETED_BLOCK_DB; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_BLOCK_DELETION_MAX_RETRY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_BLOCK_DELETION_MAX_RETRY_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB; /** * A implement class of {@link DeletedBlockLog}, and it uses diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/SCMBlockDeletingService.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java similarity index 90% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/SCMBlockDeletingService.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java index 5d7c2d57191..2c555e04212 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/SCMBlockDeletingService.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java @@ -14,18 +14,18 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; - import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.container.Mapping; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.ozone.scm.container.Mapping; -import org.apache.hadoop.ozone.scm.node.NodeManager; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BackgroundService; import org.apache.hadoop.utils.BackgroundTask; @@ -34,18 +34,20 @@ import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT; - import java.io.IOException; import java.util.List; import java.util.UUID; import java.util.concurrent.TimeUnit; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT; + /** * A background service running in SCM to delete blocks. This service scans * block deletion log in certain interval and caches block deletion commands - * in {@link org.apache.hadoop.ozone.scm.node.CommandQueue}, asynchronously + * in {@link org.apache.hadoop.hdds.scm.node.CommandQueue}, asynchronously * SCM HB thread polls cached commands and sends them to datanode for physical * processing. */ diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java index 3c566277611..e1bfdff5063 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/block/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java @@ -15,7 +15,7 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; /** * This package contains routines to manage the block location and * mapping inside SCM diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerMapping.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java similarity index 90% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerMapping.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java index 364bf7d93a7..63cb3a3c1fd 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerMapping.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java @@ -14,36 +14,34 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.closer.ContainerCloser; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.replication.ContainerSupervisor; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector; import org.apache.hadoop.hdfs.DFSUtil; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.lease.Lease; import org.apache.hadoop.ozone.lease.LeaseException; import org.apache.hadoop.ozone.lease.LeaseManager; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.container.closer.ContainerCloser; -import org.apache.hadoop.ozone.scm.container.replication.ContainerSupervisor; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.pipelines.PipelineSelector; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter; import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; - -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ 
-57,12 +55,14 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes - .FAILED_TO_CHANGE_CONTAINER_STATE; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_CONTAINER_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .FAILED_TO_CHANGE_CONTAINER_STATE; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; +import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB; /** * Mapping class contains the mapping from a name to a pipeline mapping. This @@ -162,7 +162,7 @@ public class ContainerMapping implements Mapping { SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER); } - HdslProtos.SCMContainerInfo temp = HdslProtos.SCMContainerInfo.PARSER + HddsProtos.SCMContainerInfo temp = HddsProtos.SCMContainerInfo.PARSER .parseFrom(containerBytes); containerInfo = ContainerInfo.fromProtobuf(temp); return containerInfo; @@ -194,7 +194,7 @@ public class ContainerMapping implements Mapping { for (Map.Entry entry : range) { ContainerInfo containerInfo = ContainerInfo.fromProtobuf( - HdslProtos.SCMContainerInfo.PARSER.parseFrom( + HddsProtos.SCMContainerInfo.PARSER.parseFrom( entry.getValue())); Preconditions.checkNotNull(containerInfo); containerList.add(containerInfo); @@ -283,8 +283,8 @@ public class ContainerMapping implements Mapping { * {@inheritDoc} Used by client to update container state on SCM. */ @Override - public HdslProtos.LifeCycleState updateContainerState( - String containerName, HdslProtos.LifeCycleEvent event) throws + public HddsProtos.LifeCycleState updateContainerState( + String containerName, HddsProtos.LifeCycleEvent event) throws IOException { ContainerInfo containerInfo; lock.lock(); @@ -299,7 +299,7 @@ public class ContainerMapping implements Mapping { SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER); } containerInfo = - ContainerInfo.fromProtobuf(HdslProtos.SCMContainerInfo.PARSER + ContainerInfo.fromProtobuf(HddsProtos.SCMContainerInfo.PARSER .parseFrom(containerBytes)); Preconditions.checkNotNull(containerInfo); @@ -311,7 +311,7 @@ public class ContainerMapping implements Mapping { // Register callback to be executed in case of timeout containerLease.registerCallBack(() -> { updateContainerState(containerName, - HdslProtos.LifeCycleEvent.TIMEOUT); + HddsProtos.LifeCycleEvent.TIMEOUT); return null; }); break; @@ -393,10 +393,10 @@ public class ContainerMapping implements Mapping { try { byte[] containerBytes = containerStore.get(dbKey); if (containerBytes != null) { - HdslProtos.SCMContainerInfo knownState = - HdslProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes); + HddsProtos.SCMContainerInfo knownState = + HddsProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes); - HdslProtos.SCMContainerInfo newState = + HddsProtos.SCMContainerInfo newState = reconcileState(datanodeState, knownState); // FIX ME: This can be optimized, we write twice to memory, where a @@ -431,11 +431,11 @@ public class ContainerMapping implements Mapping { * @param knownState - State inside SCM. * @return new SCM State for this container. 
*/ - private HdslProtos.SCMContainerInfo reconcileState( + private HddsProtos.SCMContainerInfo reconcileState( StorageContainerDatanodeProtocolProtos.ContainerInfo datanodeState, - HdslProtos.SCMContainerInfo knownState) { - HdslProtos.SCMContainerInfo.Builder builder = - HdslProtos.SCMContainerInfo.newBuilder(); + HddsProtos.SCMContainerInfo knownState) { + HddsProtos.SCMContainerInfo.Builder builder = + HddsProtos.SCMContainerInfo.newBuilder(); builder.setContainerName(knownState.getContainerName()); builder.setPipeline(knownState.getPipeline()); // If used size is greater than allocated size, we will be updating @@ -468,7 +468,7 @@ public class ContainerMapping implements Mapping { * @param newState - This is the state we maintain in SCM. * @throws IOException */ - private boolean closeContainerIfNeeded(HdslProtos.SCMContainerInfo newState) + private boolean closeContainerIfNeeded(HddsProtos.SCMContainerInfo newState) throws IOException { float containerUsedPercentage = 1.0f * newState.getUsedBytes() / this.size; @@ -487,10 +487,10 @@ public class ContainerMapping implements Mapping { // container to reach. We will know that a container has reached the // closed state from container reports. This state change should be // invoked once and only once. - HdslProtos.LifeCycleState state = updateContainerState( + HddsProtos.LifeCycleState state = updateContainerState( scmInfo.getContainerName(), - HdslProtos.LifeCycleEvent.FINALIZE); - if (state != HdslProtos.LifeCycleState.CLOSING) { + HddsProtos.LifeCycleEvent.FINALIZE); + if (state != HddsProtos.LifeCycleState.CLOSING) { LOG.error("Failed to close container {}, reason : Not able " + "to " + "update container state, current container state: {}.", @@ -511,11 +511,11 @@ public class ContainerMapping implements Mapping { * @return true if is in open state, false otherwise */ private boolean shouldClose(ContainerInfo info) { - return info.getState() == HdslProtos.LifeCycleState.OPEN; + return info.getState() == HddsProtos.LifeCycleState.OPEN; } private boolean isClosed(ContainerInfo info) { - return info.getState() == HdslProtos.LifeCycleState.CLOSED; + return info.getState() == HddsProtos.LifeCycleState.CLOSED; } @VisibleForTesting @@ -572,8 +572,8 @@ public class ContainerMapping implements Mapping { // return info of a deleted container. 
may revisit this in the future, // for now, just skip a not-found container if (containerBytes != null) { - HdslProtos.SCMContainerInfo oldInfoProto = - HdslProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes); + HddsProtos.SCMContainerInfo oldInfoProto = + HddsProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes); ContainerInfo oldInfo = ContainerInfo.fromProtobuf(oldInfoProto); ContainerInfo newInfo = new ContainerInfo.Builder() .setAllocatedBytes(info.getAllocatedBytes()) diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStateManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java similarity index 91% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStateManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java index b9f0e4c8d4b..227eca04e88 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStateManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java @@ -15,26 +15,26 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.states.ContainerState; +import org.apache.hadoop.hdds.scm.container.states.ContainerStateMap; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.ozone.common.statemachine.InvalidStateTransitionException; +import org.apache.hadoop.ozone.common.statemachine + .InvalidStateTransitionException; import org.apache.hadoop.ozone.common.statemachine.StateMachine; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleEvent; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerID; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerState; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerStateMap; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.pipelines.PipelineSelector; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,7 +49,7 @@ import java.util.Set; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes .FAILED_TO_CHANGE_CONTAINER_STATE; /** @@ -113,8 +113,8 @@ public class ContainerStateManager implements Closeable { private static final Logger LOG = LoggerFactory.getLogger(ContainerStateManager.class); - private final StateMachine stateMachine; + private final StateMachine stateMachine; private final long containerSize; private final ConcurrentHashMap lastUsedMap; @@ -132,7 +132,7 @@ public class ContainerStateManager implements Closeable { Mapping containerMapping) { // Initialize the container state machine. - Set finalStates = new HashSet(); + Set finalStates = new HashSet(); // These are the steady states of a container. finalStates.add(LifeCycleState.OPEN); @@ -284,8 +284,8 @@ public class ContainerStateManager implements Closeable { * @return Container Info. * @throws IOException on Failure. */ - public ContainerInfo allocateContainer(PipelineSelector selector, HdslProtos - .ReplicationType type, HdslProtos.ReplicationFactor replicationFactor, + public ContainerInfo allocateContainer(PipelineSelector selector, HddsProtos + .ReplicationType type, HddsProtos.ReplicationFactor replicationFactor, final String containerName, String owner) throws IOException { @@ -298,7 +298,7 @@ public class ContainerStateManager implements Closeable { ContainerInfo containerInfo = new ContainerInfo.Builder() .setContainerName(containerName) - .setState(HdslProtos.LifeCycleState.ALLOCATED) + .setState(HddsProtos.LifeCycleState.ALLOCATED) .setPipeline(pipeline) // This is bytes allocated for blocks inside container, not the // container size @@ -324,7 +324,7 @@ public class ContainerStateManager implements Closeable { * @throws SCMException on Failure. */ public ContainerInfo updateContainerState(ContainerInfo - info, HdslProtos.LifeCycleEvent event) throws SCMException { + info, HddsProtos.LifeCycleEvent event) throws SCMException { LifeCycleState newState; try { newState = this.stateMachine.getNextState(info.getState(), event); diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/Mapping.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java similarity index 87% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/Mapping.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java index 1ba426c2f20..c949c6c4b54 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/Mapping.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java @@ -14,13 +14,12 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; - -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; import java.io.Closeable; import java.io.IOException; @@ -68,8 +67,8 @@ public interface Mapping extends Closeable { * @return - Container Info. * @throws IOException */ - ContainerInfo allocateContainer(HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor replicationFactor, + ContainerInfo allocateContainer(HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor replicationFactor, String containerName, String owner) throws IOException; /** @@ -87,8 +86,8 @@ public interface Mapping extends Closeable { * @return - new container state * @throws IOException */ - HdslProtos.LifeCycleState updateContainerState(String containerName, - HdslProtos.LifeCycleEvent event) throws IOException; + HddsProtos.LifeCycleState updateContainerState(String containerName, + HddsProtos.LifeCycleEvent event) throws IOException; /** * Returns the container State Manager. diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/ContainerCloser.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/ContainerCloser.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java index c9256955f1b..b5d4da9ed1f 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/ContainerCloser.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java @@ -16,16 +16,16 @@ * */ -package org.apache.hadoop.ozone.scm.container.closer; +package org.apache.hadoop.hdds.scm.container.closer; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.ozone.scm.node.NodeManager; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -91,7 +91,7 @@ public class ContainerCloser { * * @param info - ContainerInfo. */ - public void close(HdslProtos.SCMContainerInfo info) { + public void close(HddsProtos.SCMContainerInfo info) { if (commandIssued.containsKey(info.getContainerName())) { // We check if we issued a close command in last 3 * reportInterval secs. @@ -126,8 +126,8 @@ public class ContainerCloser { // this queue can be emptied by a datanode after a close report is send // to SCM. In that case also, data node will ignore this command. 
- HdslProtos.Pipeline pipeline = info.getPipeline(); - for (HdslProtos.DatanodeDetailsProto datanodeDetails : + HddsProtos.Pipeline pipeline = info.getPipeline(); + for (HddsProtos.DatanodeDetailsProto datanodeDetails : pipeline.getPipelineChannel().getMembersList()) { nodeManager.addDatanodeCommand( DatanodeDetails.getFromProtoBuf(datanodeDetails).getUuid(), diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java index 2d0f257b334..ee02bbd88f2 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/closer/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java @@ -20,4 +20,4 @@ * This package has class that close a container. That is move a container from * open state to close state. */ -package org.apache.hadoop.ozone.scm.container.closer; \ No newline at end of file +package org.apache.hadoop.hdds.scm.container.closer; \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java index 4ac490cb6f5..3f8d05681bd 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java @@ -15,7 +15,7 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; /** * This package contains routines to manage the container location and * mapping inside SCM diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/ContainerPlacementPolicy.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/ContainerPlacementPolicy.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java index 1129d93f33b..5d91ac5dad1 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/ContainerPlacementPolicy.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java @@ -15,9 +15,9 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm.container.placement.algorithms; +package org.apache.hadoop.hdds.scm.container.placement.algorithms; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import java.io.IOException; import java.util.List; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMCommonPolicy.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMCommonPolicy.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java index 71e5ebbb1ee..0a595d55ba5 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMCommonPolicy.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java @@ -15,15 +15,15 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container.placement.algorithms; +package org.apache.hadoop.hdds.scm.container.placement.algorithms; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -104,7 +104,7 @@ public abstract class SCMCommonPolicy implements ContainerPlacementPolicy { public List chooseDatanodes(int nodesRequired, final long sizeRequired) throws SCMException { List healthyNodes = - nodeManager.getNodes(HdslProtos.NodeState.HEALTHY); + nodeManager.getNodes(HddsProtos.NodeState.HEALTHY); String msg; if (healthyNodes.size() == 0) { msg = "No healthy node found to allocate container."; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java index 90d301cfcc4..85a6b544cce 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java @@ -15,14 +15,14 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm.container.placement.algorithms; +package org.apache.hadoop.hdds.scm.container.placement.algorithms; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementRandom.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementRandom.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java index f46976294e6..9903c84e317 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/SCMContainerPlacementRandom.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java @@ -15,13 +15,13 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container.placement.algorithms; +package org.apache.hadoop.hdds.scm.container.placement.algorithms; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java index d6280df4309..1cb810dd0e5 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/algorithms/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java @@ -14,5 +14,5 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.placement.algorithms; +package org.apache.hadoop.hdds.scm.container.placement.algorithms; // Various placement algorithms. 
\ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/ContainerStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/ContainerStat.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java index 810b8fd5dc2..b8e89987638 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/ContainerStat.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java @@ -15,14 +15,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; - -import java.io.IOException; - -import org.apache.hadoop.ozone.web.utils.JsonUtils; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; +import org.apache.hadoop.ozone.web.utils.JsonUtils; + +import java.io.IOException; /** * This class represents the SCM container stat. diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/DatanodeMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/DatanodeMetric.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java index cc829c2c3f0..a6e732c7503 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/DatanodeMetric.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java @@ -15,9 +15,9 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; /** * DatanodeMetric acts as the basis for all the metric that is used in diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/LongMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/LongMetric.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java index dbcd9f42264..050d26bd23c 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/LongMetric.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java @@ -14,7 +14,7 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/NodeStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java similarity index 96% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/NodeStat.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java index bda61f8ccd8..d6857d395cf 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/NodeStat.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import com.google.common.annotations.VisibleForTesting; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMMetrics.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMMetrics.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java index bdfec2d32aa..e4dd9aa37ef 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMMetrics.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.annotation.Metric; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeMetric.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java index ae01361663c..b50376d89dc 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeMetric.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java @@ -15,7 +15,7 @@ * the License. 
*/ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeStat.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java index 64f76a6b0a5..3c871d3ef50 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/SCMNodeStat.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java index 51350958e15..4a81d692168 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/metrics/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java @@ -14,7 +14,7 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.placement.metrics; +package org.apache.hadoop.hdds.scm.container.placement.metrics; // Various metrics supported by Datanode and used by SCM in the placement // strategy. \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java index 43676effc61..dc54d9bd912 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/placement/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java @@ -15,5 +15,5 @@ * the License. */ -package org.apache.hadoop.ozone.scm.container.placement; +package org.apache.hadoop.hdds.scm.container.placement; // Classes related to container placement. 
\ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/ContainerSupervisor.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/ContainerSupervisor.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java index e51ad79e935..52321eee4a3 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/ContainerSupervisor.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java @@ -14,18 +14,18 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.replication; +package org.apache.hadoop.hdds.scm.container.replication; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.slf4j.Logger; @@ -47,21 +47,21 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import static com.google.common.util.concurrent.Uninterruptibles .sleepUninterruptibly; -import static org.apache.hadoop.scm.ScmConfigKeys - .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT; -import static org.apache.hadoop.scm.ScmConfigKeys - .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_MAX_CONTAINER_REPORT_THREADS; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_MAX_CONTAINER_REPORT_THREADS_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS_DEFAULT; /** diff --git 
a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/InProgressPool.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/InProgressPool.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java index 5b2dd0fe78c..ddbd2134026 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/InProgressPool.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java @@ -14,19 +14,19 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.replication; +package org.apache.hadoop.hdds.scm.container.replication; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerInfo; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; +import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,12 +43,11 @@ import java.util.stream.Collectors; import static com.google.common.util.concurrent.Uninterruptibles .sleepUninterruptibly; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos - .NodeState.HEALTHY; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos - .NodeState.STALE; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos - .NodeState.INVALID; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .INVALID; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE; /** * These are pools that are actively checking for replication status of the diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/PeriodicPool.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/PeriodicPool.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java index 35b1e7607ba..ef28aa78d01 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/PeriodicPool.java +++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java @@ -14,7 +14,7 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.replication; +package org.apache.hadoop.hdds.scm.container.replication; import java.util.concurrent.atomic.AtomicLong; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java index 82e420272e1..7bbe2efe578 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/replication/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java @@ -14,7 +14,7 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container.replication; +package org.apache.hadoop.hdds.scm.container.replication; /* This package contains routines that manage replication of a container. This relies on container reports to understand the replication level of a diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerAttribute.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java similarity index 97% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerAttribute.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java index 1372e7f0cba..288fa2deb14 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerAttribute.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java @@ -15,10 +15,11 @@ * the License. 
* */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; +package org.apache.hadoop.hdds.scm.container.states; import com.google.common.base.Preconditions; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.container.ContainerID; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,7 +29,7 @@ import java.util.Map; import java.util.NavigableSet; import java.util.TreeSet; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes .FAILED_TO_CHANGE_CONTAINER_STATE; /** diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerState.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java similarity index 83% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerState.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java index d3e4522ea48..1dac36ef771 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerState.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java @@ -16,19 +16,19 @@ * */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; +package org.apache.hadoop.hdds.scm.container.states; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; /** * Class that acts as the container state. */ public class ContainerState { - private final HdslProtos.ReplicationType type; + private final HddsProtos.ReplicationType type; private final String owner; - private final HdslProtos.ReplicationFactor replicationFactor; + private final HddsProtos.ReplicationFactor replicationFactor; /** * Constructs a Container Key. @@ -37,15 +37,15 @@ public class ContainerState { * @param type - Replication Type. 
* @param factor - Replication Factors */ - public ContainerState(String owner, HdslProtos.ReplicationType type, - HdslProtos.ReplicationFactor factor) { + public ContainerState(String owner, HddsProtos.ReplicationType type, + HddsProtos.ReplicationFactor factor) { this.type = type; this.owner = owner; this.replicationFactor = factor; } - public HdslProtos.ReplicationType getType() { + public HddsProtos.ReplicationType getType() { return type; } @@ -53,7 +53,7 @@ public class ContainerState { return owner; } - public HdslProtos.ReplicationFactor getFactor() { + public HddsProtos.ReplicationFactor getFactor() { return replicationFactor; } diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerStateMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerStateMap.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java index fdbf2673ee1..48c6423d02e 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/ContainerStateMap.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java @@ -16,14 +16,15 @@ * */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; +package org.apache.hadoop.hdds.scm.container.states; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.ContainerID; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.apache.hadoop.util.AutoCloseableLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,11 +35,11 @@ import java.util.Map; import java.util.NavigableSet; import java.util.TreeSet; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes .CONTAINER_EXISTS; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes .FAILED_TO_CHANGE_CONTAINER_STATE; -import static org.apache.hadoop.ozone.scm.exceptions.SCMException.ResultCodes +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes .FAILED_TO_FIND_CONTAINER; /** diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/package-info.java rename to 
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java index 6a7e6631711..cf20f396903 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/container/ContainerStates/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java @@ -19,4 +19,4 @@ /** * Container States management package. */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; \ No newline at end of file +package org.apache.hadoop.hdds.scm.container.states; \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/SCMException.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java similarity index 98% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/SCMException.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java index 4fb50e76237..227df3c9e90 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/SCMException.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.exceptions; +package org.apache.hadoop.hdds.scm.exceptions; import java.io.IOException; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java index 0922382d145..7b69310c239 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/exceptions/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java @@ -14,5 +14,5 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.exceptions; +package org.apache.hadoop.hdds.scm.exceptions; // Exceptions thrown by SCM. \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/CommandQueue.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java similarity index 99% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/CommandQueue.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java index c376efa5641..edbcfa12f20 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/CommandQueue.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/HeartbeatQueueItem.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/HeartbeatQueueItem.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java index fe7ff144211..43720f01044 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/HeartbeatQueueItem.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java @@ -16,12 +16,13 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMNodeReport; import static org.apache.hadoop.util.Time.monotonicNow; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java index c1b2acaf5a9..4392633b16f 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java @@ -15,16 +15,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; import java.io.Closeable; import java.util.List; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManagerMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java similarity index 97% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManagerMXBean.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java index 46881c9c60e..3ac993b77d9 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodeManagerMXBean.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodePoolManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodePoolManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java index c330526786d..46faf9ca4d9 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/NodePoolManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java @@ -16,11 +16,10 @@ * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; - -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import java.io.Closeable; import java.io.IOException; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java similarity index 94% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodeManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java index 31915b18078..0174c1754f2 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodeManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java @@ -15,50 +15,42 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hdds.scm.HddsServerUtil; +import org.apache.hadoop.hdds.scm.StorageContainerManager; +import org.apache.hadoop.hdds.scm.VersionInfo; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.ipc.Server; -import org.apache.hadoop.metrics2.util.MBeans; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol; -import org.apache.hadoop.ozone.protocol.VersionResponse; -import org.apache.hadoop.ozone.protocol.commands.ReregisterCommand; -import org.apache.hadoop.ozone.protocol.commands.RegisteredCommand; -import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto .ErrorCode; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMStorageReport; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol - 
.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol - .proto.StorageContainerDatanodeProtocolProtos.SCMStorageReport; -import org.apache.hadoop.ozone.scm.StorageContainerManager; -import org.apache.hadoop.ozone.scm.VersionInfo; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; +import org.apache.hadoop.ipc.Server; +import org.apache.hadoop.metrics2.util.MBeans; +import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol; +import org.apache.hadoop.ozone.protocol.VersionResponse; +import org.apache.hadoop.ozone.protocol.commands.RegisteredCommand; +import org.apache.hadoop.ozone.protocol.commands.ReregisterCommand; +import org.apache.hadoop.ozone.protocol.commands.SCMCommand; +import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.concurrent.HadoopExecutors; - -import static org.apache.hadoop.ozone.scm.HdslServerUtil.getDeadNodeInterval; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getMaxHBToProcessPerLoop; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmHeartbeatInterval; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmheartbeatCheckerInterval; -import static org.apache.hadoop.ozone.scm.HdslServerUtil.getStaleNodeInterval; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -79,10 +71,12 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.DEAD; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.HEALTHY; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.STALE; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.INVALID; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .INVALID; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE; import static org.apache.hadoop.util.Time.monotonicNow; /** @@ -188,12 +182,12 @@ public class SCMNodeManager // TODO: Support this value as a Percentage of known machines. 
chillModeNodeCount = 1; - staleNodeIntervalMs = getStaleNodeInterval(conf); - deadNodeIntervalMs = getDeadNodeInterval(conf); + staleNodeIntervalMs = HddsServerUtil.getStaleNodeInterval(conf); + deadNodeIntervalMs = HddsServerUtil.getDeadNodeInterval(conf); heartbeatCheckerIntervalMs = - getScmheartbeatCheckerInterval(conf); - datanodeHBIntervalSeconds = getScmHeartbeatInterval(conf); - maxHBToProcessPerLoop = getMaxHBToProcessPerLoop(conf); + HddsServerUtil.getScmheartbeatCheckerInterval(conf); + datanodeHBIntervalSeconds = HddsServerUtil.getScmHeartbeatInterval(conf); + maxHBToProcessPerLoop = HddsServerUtil.getMaxHBToProcessPerLoop(conf); executorService = HadoopExecutors.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true) diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodePoolManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java similarity index 91% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodePoolManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java index f9b193fe094..a4a6c51cdf4 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/SCMNodePoolManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java @@ -16,19 +16,17 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.base.Preconditions; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; - -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,15 +41,16 @@ import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_MB; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .FAILED_TO_FIND_NODE_IN_POOL; +import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes + .FAILED_TO_LOAD_NODEPOOL; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; import static org.apache.hadoop.ozone.OzoneConsts.NODEPOOL_DB; -import static org.apache.hadoop.ozone.scm - .exceptions.SCMException.ResultCodes.FAILED_TO_LOAD_NODEPOOL; -import static org.apache.hadoop.ozone.scm - .exceptions.SCMException.ResultCodes.FAILED_TO_FIND_NODE_IN_POOL; -import static org.apache.hadoop.scm - .ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_DEFAULT; -import static org.apache.hadoop.scm - .ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB; /** * SCM node pool manager that manges node pools. 
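
Note on the SCMNodeManager constructor hunk above: the patch drops the static imports of the old HdslServerUtil helpers and calls the getters through org.apache.hadoop.hdds.scm.HddsServerUtil instead. A minimal sketch of that usage pattern, assuming only the getters that appear in the hunk; the HeartbeatSettings holder class is a stand-in for illustration and is not part of the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.scm.HddsServerUtil;

    // Stand-in for the interval fields SCMNodeManager fills in its constructor.
    class HeartbeatSettings {
      final long staleNodeIntervalMs;
      final long deadNodeIntervalMs;
      final long heartbeatCheckerIntervalMs;
      final long datanodeHBIntervalSeconds;

      HeartbeatSettings(Configuration conf) {
        // Formerly bare calls via static imports from HdslServerUtil.
        staleNodeIntervalMs = HddsServerUtil.getStaleNodeInterval(conf);
        deadNodeIntervalMs = HddsServerUtil.getDeadNodeInterval(conf);
        heartbeatCheckerIntervalMs =
            HddsServerUtil.getScmheartbeatCheckerInterval(conf);
        datanodeHBIntervalSeconds = HddsServerUtil.getScmHeartbeatInterval(conf);
      }
    }

Reading the values through the qualified class name keeps the rename mechanical: only the utility class and its package change, not the getter signatures.
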
@@ -106,7 +105,7 @@ public final class SCMNodePoolManager implements NodePoolManager { nodePoolStore.iterate(null, (key, value) -> { try { DatanodeDetails nodeId = DatanodeDetails.getFromProtoBuf( - HdslProtos.DatanodeDetailsProto.PARSER.parseFrom(key)); + HddsProtos.DatanodeDetailsProto.PARSER.parseFrom(key)); String poolName = DFSUtil.bytes2String(value); Set nodePool = null; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java similarity index 96% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java index f1efe799106..d6a8ad0394e 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/node/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java @@ -14,7 +14,7 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; /** * The node package deals with node management. diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java index 7686df36108..4669e741ef0 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java @@ -15,7 +15,7 @@ * the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; /* * This package contains StorageContainerManager classes. diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java similarity index 92% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineManager.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java index 35e1bc103ab..8e435289145 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java @@ -14,15 +14,14 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.pipelines; +package org.apache.hadoop.hdds.scm.pipelines; - -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineSelector.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java similarity index 89% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineSelector.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java index 0eac7f6e249..f0c9eea4416 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/PipelineSelector.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java @@ -14,24 +14,26 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.pipelines; +package org.apache.hadoop.hdds.scm.pipelines; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .ContainerPlacementPolicy; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .SCMContainerPlacementRandom; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.pipelines.ratis.RatisManagerImpl; +import org.apache.hadoop.hdds.scm.pipelines.standalone.StandaloneManagerImpl; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.ContainerPlacementPolicy; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementRandom; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.pipelines.ratis.RatisManagerImpl; -import org.apache.hadoop.ozone.scm.pipelines.standalone.StandaloneManagerImpl; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -164,7 +166,7 @@ public class PipelineSelector { */ public Pipeline getReplicationPipeline(ReplicationType replicationType, - HdslProtos.ReplicationFactor replicationFactor, String containerName) + HddsProtos.ReplicationFactor replicationFactor, String containerName) throws IOException { PipelineManager manager = getPipelineManager(replicationType); Preconditions.checkNotNull(manager, "Found invalid pipeline manager"); diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java similarity index 97% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java index c2a3b5490e6..ea24c581a8c 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.pipelines; +package org.apache.hadoop.hdds.scm.pipelines; /** Ozone supports the notion of different kind of pipelines. 
That means that we can have a replication pipeline build on diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/RatisManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java similarity index 85% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/RatisManagerImpl.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java index c98573e3533..089a1374a4e 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/RatisManagerImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java @@ -14,23 +14,23 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.pipelines.ratis; +package org.apache.hadoop.hdds.scm.pipelines.ratis; import com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.ozone.scm.container.placement.algorithms +import org.apache.hadoop.hdds.scm.XceiverClientRatis; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.placement.algorithms .ContainerPlacementPolicy; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.pipelines.PipelineManager; -import org.apache.hadoop.ozone.scm.pipelines.PipelineSelector; -import org.apache.hadoop.scm.XceiverClientRatis; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.pipelines.PipelineManager; +import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java similarity index 93% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java index 6fe9b2821b1..2970fb354bf 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/ratis/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java @@ -15,4 +15,4 @@ * See the License for the specific language governing permissions and * 
limitations under the License. */ -package org.apache.hadoop.ozone.scm.pipelines.ratis; \ No newline at end of file +package org.apache.hadoop.hdds.scm.pipelines.ratis; \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/standalone/StandaloneManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java similarity index 85% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/standalone/StandaloneManagerImpl.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java index 023baea815c..82683293512 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/pipelines/standalone/StandaloneManagerImpl.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java @@ -14,28 +14,29 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.pipelines.standalone; +package org.apache.hadoop.hdds.scm.pipelines.standalone; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.ContainerPlacementPolicy; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.pipelines.PipelineManager; -import org.apache.hadoop.ozone.scm.pipelines.PipelineSelector; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .ContainerPlacementPolicy; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.pipelines.PipelineManager; +import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.List; -import java.util.UUID; -import java.util.Set; import java.util.HashSet; import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; /** * Standalone Manager Impl to prove that pluggable interface diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java new file mode 100644 index 00000000000..b2c3ca40e50 --- /dev/null +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java @@ -0,0 +1,18 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.pipelines.standalone; \ No newline at end of file diff --git a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/ratis/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java similarity index 95% rename from hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/ratis/package-info.java rename to hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java index 27fd32b8514..49440175932 100644 --- a/hadoop-hdsl/server-scm/src/main/java/org/apache/hadoop/ozone/scm/ratis/package-info.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.ratis; +package org.apache.hadoop.hdds.scm.ratis; /** * This package contains classes related to Apache Ratis for SCM. diff --git a/hadoop-hdsl/server-scm/src/main/webapps/scm/index.html b/hadoop-hdds/server-scm/src/main/webapps/scm/index.html similarity index 100% rename from hadoop-hdsl/server-scm/src/main/webapps/scm/index.html rename to hadoop-hdds/server-scm/src/main/webapps/scm/index.html diff --git a/hadoop-hdsl/server-scm/src/main/webapps/scm/main.html b/hadoop-hdds/server-scm/src/main/webapps/scm/main.html similarity index 100% rename from hadoop-hdsl/server-scm/src/main/webapps/scm/main.html rename to hadoop-hdds/server-scm/src/main/webapps/scm/main.html diff --git a/hadoop-hdsl/server-scm/src/main/webapps/scm/scm-overview.html b/hadoop-hdds/server-scm/src/main/webapps/scm/scm-overview.html similarity index 100% rename from hadoop-hdsl/server-scm/src/main/webapps/scm/scm-overview.html rename to hadoop-hdds/server-scm/src/main/webapps/scm/scm-overview.html diff --git a/hadoop-hdsl/server-scm/src/main/webapps/scm/scm.js b/hadoop-hdds/server-scm/src/main/webapps/scm/scm.js similarity index 100% rename from hadoop-hdsl/server-scm/src/main/webapps/scm/scm.js rename to hadoop-hdds/server-scm/src/main/webapps/scm/scm.js diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/HdslServerUtilTest.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java similarity index 90% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/HdslServerUtilTest.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java index 5be5ec1364d..6e01e5354b6 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/HdslServerUtilTest.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java @@ -17,37 +17,30 @@ */ -package org.apache.hadoop.ozone.scm; +package 
org.apache.hadoop.hdds.scm; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.Timeout; import java.net.InetSocketAddress; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.scm.ScmConfigKeys; - -import static org.apache.hadoop.hdsl.HdslUtils.getSCMAddresses; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmAddressForDataNodes; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmClientBindAddress; -import static org.apache.hadoop.ozone.scm.HdslServerUtil - .getScmDataNodeBindAddress; +import static org.apache.hadoop.hdds.HddsUtils.getSCMAddresses; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.rules.Timeout; /** - * Test the HDSL server side utilities. + * Test the HDDS server side utilities. */ -public class HdslServerUtilTest { +public class HddsServerUtilTest { @Rule public Timeout timeout = new Timeout(300000); @@ -63,7 +56,7 @@ public class HdslServerUtilTest { public void testMissingScmDataNodeAddress() { final Configuration conf = new OzoneConfiguration(); thrown.expect(IllegalArgumentException.class); - getScmAddressForDataNodes(conf); + HddsServerUtil.getScmAddressForDataNodes(conf); } /** @@ -78,7 +71,7 @@ public class HdslServerUtilTest { // First try a client address with just a host name. Verify it falls // back to the default port. conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4"); - InetSocketAddress addr = getScmAddressForDataNodes(conf); + InetSocketAddress addr = HddsServerUtil.getScmAddressForDataNodes(conf); assertThat(addr.getHostString(), is("1.2.3.4")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); @@ -86,7 +79,7 @@ public class HdslServerUtilTest { // Next try a client address with just a host name and port. // Verify the port is ignored and the default DataNode port is used. conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); - addr = getScmAddressForDataNodes(conf); + addr = HddsServerUtil.getScmAddressForDataNodes(conf); assertThat(addr.getHostString(), is("1.2.3.4")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); @@ -98,7 +91,7 @@ public class HdslServerUtilTest { conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "5.6.7.8"); addr = - getScmAddressForDataNodes(conf); + HddsServerUtil.getScmAddressForDataNodes(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); @@ -109,7 +102,7 @@ public class HdslServerUtilTest { // used. 
conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "5.6.7.8:200"); - addr = getScmAddressForDataNodes(conf); + addr = HddsServerUtil.getScmAddressForDataNodes(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is(200)); } @@ -126,7 +119,7 @@ public class HdslServerUtilTest { // The bind host should be 0.0.0.0 unless OZONE_SCM_CLIENT_BIND_HOST_KEY // is set differently. conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4"); - InetSocketAddress addr = getScmClientBindAddress(conf); + InetSocketAddress addr = HddsServerUtil.getScmClientBindAddress(conf); assertThat(addr.getHostString(), is("0.0.0.0")); assertThat(addr.getPort(), is(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT)); @@ -135,7 +128,7 @@ public class HdslServerUtilTest { // should be respected. conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200"); - addr = getScmClientBindAddress(conf); + addr = HddsServerUtil.getScmClientBindAddress(conf); assertThat(addr.getHostString(), is("0.0.0.0")); assertThat(addr.getPort(), is(100)); @@ -145,7 +138,7 @@ public class HdslServerUtilTest { conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4"); conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY, "5.6.7.8"); - addr = getScmClientBindAddress(conf); + addr = HddsServerUtil.getScmClientBindAddress(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT)); @@ -156,7 +149,7 @@ public class HdslServerUtilTest { conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200"); conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY, "5.6.7.8"); - addr = getScmClientBindAddress(conf); + addr = HddsServerUtil.getScmClientBindAddress(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is(100)); } @@ -172,7 +165,7 @@ public class HdslServerUtilTest { // The bind host should be 0.0.0.0 unless OZONE_SCM_DATANODE_BIND_HOST_KEY // is set differently. conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4"); - InetSocketAddress addr = getScmDataNodeBindAddress(conf); + InetSocketAddress addr = HddsServerUtil.getScmDataNodeBindAddress(conf); assertThat(addr.getHostString(), is("0.0.0.0")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); @@ -182,7 +175,7 @@ public class HdslServerUtilTest { // should be respected. 
conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200"); - addr = getScmDataNodeBindAddress(conf); + addr = HddsServerUtil.getScmDataNodeBindAddress(conf); assertThat(addr.getHostString(), is("0.0.0.0")); assertThat(addr.getPort(), is(200)); @@ -192,7 +185,7 @@ public class HdslServerUtilTest { conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY, "5.6.7.8"); - addr = getScmDataNodeBindAddress(conf); + addr = HddsServerUtil.getScmDataNodeBindAddress(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is( ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); @@ -203,7 +196,7 @@ public class HdslServerUtilTest { conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200"); conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY, "5.6.7.8"); - addr = getScmDataNodeBindAddress(conf); + addr = HddsServerUtil.getScmDataNodeBindAddress(conf); assertThat(addr.getHostString(), is("5.6.7.8")); assertThat(addr.getPort(), is(200)); } diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestStorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java similarity index 98% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestStorageContainerManagerHttpServer.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java index aeb57055a88..5d9139d72c0 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestStorageContainerManagerHttpServer.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; @@ -25,7 +25,6 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpConfig.Policy; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.scm.ScmConfigKeys; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestUtils.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java similarity index 95% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestUtils.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java index 3385fd6e4c9..e191dd54a1b 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/TestUtils.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java @@ -14,16 +14,16 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; + +import org.apache.hadoop.hdds.scm.node.SCMNodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.UUID; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.node.SCMNodeManager; - /** * Stateless helper functions to handler scm/datanode connection. */ diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestBlockManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java similarity index 87% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestBlockManager.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java index c28f8359c8b..0eff702cded 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestBlockManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java @@ -15,18 +15,18 @@ * the License. */ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.container.ContainerMapping; +import org.apache.hadoop.hdds.scm.container.MockNodeManager; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.SCMTestUtils; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.ozone.scm.container.ContainerMapping; -import org.apache.hadoop.ozone.scm.container.MockNodeManager; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Assert; @@ -55,8 +55,8 @@ public class TestBlockManager { private static BlockManagerImpl blockManager; private static File testDir; private final static long DEFAULT_BLOCK_SIZE = 128 * MB; - private static HdslProtos.ReplicationFactor factor; - private static HdslProtos.ReplicationType type; + private static HddsProtos.ReplicationFactor factor; + private static HddsProtos.ReplicationType type; private static String containerOwner = "OZONE"; @Rule @@ -79,11 +79,11 @@ public class TestBlockManager { blockManager = new BlockManagerImpl(conf, nodeManager, mapping, 128); if(conf.getBoolean(ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY, ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT)){ - factor = HdslProtos.ReplicationFactor.THREE; - type = HdslProtos.ReplicationType.RATIS; + factor = HddsProtos.ReplicationFactor.THREE; + type = HddsProtos.ReplicationType.RATIS; } else { - factor = HdslProtos.ReplicationFactor.ONE; - type = HdslProtos.ReplicationType.STAND_ALONE; + factor = HddsProtos.ReplicationFactor.ONE; + type = HddsProtos.ReplicationType.STAND_ALONE; } } diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestDeletedBlockLog.java 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java similarity index 93% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestDeletedBlockLog.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java index 6e883391b1a..e820fa44278 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/block/TestDeletedBlockLog.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java @@ -15,22 +15,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.block; +package org.apache.hadoop.hdds.scm.block; import org.apache.commons.io.FileUtils; +import org.apache.hadoop.hdds.scm.container.ContainerMapping; +import org.apache.hadoop.hdds.scm.container.Mapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.ozone.scm.container.ContainerMapping; -import org.apache.hadoop.ozone.scm.container.Mapping; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.utils.MetadataKeyFilters; import org.apache.hadoop.utils.MetadataStore; @@ -52,8 +52,9 @@ import java.util.Random; import java.util.UUID; import java.util.stream.Collectors; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_BLOCK_DELETION_MAX_RETRY; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_BLOCK_DELETION_MAX_RETRY; import static org.mockito.Mockito.mock; /** diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java similarity index 91% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/MockNodeManager.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java index 587e60e8e9a..e3473b3634c 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/MockNodeManager.java +++ 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java @@ -14,31 +14,28 @@ * License for the specific language governing permissions and limitations under * the License. */ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMStorageReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.protocol.VersionResponse; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMStorageReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; - -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; - -import static org.apache.hadoop.ozone.scm.TestUtils.getDatanodeDetails; -import org.mockito.Mockito; import org.assertj.core.util.Preconditions; +import org.mockito.Mockito; import java.io.IOException; import java.util.HashMap; @@ -47,11 +44,11 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.DEAD; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState +import static org.apache.hadoop.hdds.scm.TestUtils.getDatanodeDetails; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState .HEALTHY; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState - .STALE; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE; /** * Test Helper for testing container Mapping. @@ -150,7 +147,7 @@ public class MockNodeManager implements NodeManager { * @return List of Datanodes that are Heartbeating SCM. 
*/ @Override - public List getNodes(HdslProtos.NodeState nodestate) { + public List getNodes(HddsProtos.NodeState nodestate) { if (nodestate == HEALTHY) { return healthyNodes; } @@ -173,7 +170,7 @@ public class MockNodeManager implements NodeManager { * @return int -- count */ @Override - public int getNodeCount(HdslProtos.NodeState nodestate) { + public int getNodeCount(HddsProtos.NodeState nodestate) { List nodes = getNodes(nodestate); if (nodes != null) { return nodes.size(); @@ -299,7 +296,7 @@ public class MockNodeManager implements NodeManager { * @return Healthy/Stale/Dead. */ @Override - public HdslProtos.NodeState getNodeState(DatanodeDetails dd) { + public HddsProtos.NodeState getNodeState(DatanodeDetails dd) { return null; } @@ -379,7 +376,7 @@ public class MockNodeManager implements NodeManager { * @return SCMHeartbeatResponseProto */ @Override - public SCMCommand register(HdslProtos.DatanodeDetailsProto datanodeDetails) { + public SCMCommand register(HddsProtos.DatanodeDetailsProto datanodeDetails) { return null; } @@ -393,7 +390,7 @@ public class MockNodeManager implements NodeManager { */ @Override public List sendHeartbeat( - HdslProtos.DatanodeDetailsProto datanodeDetails, + HddsProtos.DatanodeDetailsProto datanodeDetails, SCMNodeReport nodeReport, ReportState containerReportState) { if ((datanodeDetails != null) && (nodeReport != null) && (nodeReport .getStorageReportCount() > 0)) { @@ -421,7 +418,7 @@ public class MockNodeManager implements NodeManager { @Override public Map getNodeCount() { Map nodeCountMap = new HashMap(); - for (HdslProtos.NodeState state : HdslProtos.NodeState.values()) { + for (HddsProtos.NodeState state : HddsProtos.NodeState.values()) { nodeCountMap.put(state.toString(), getNodeCount(state)); } return nodeCountMap; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/TestContainerMapping.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java similarity index 91% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/TestContainerMapping.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java index cec02de2183..200a611d0f8 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/TestContainerMapping.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java @@ -14,26 +14,24 @@ * License for the specific language governing permissions and limitations under * the License. 
*/ -package org.apache.hadoop.ozone.scm.container; +package org.apache.hadoop.hdds.scm.container; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.SCMTestUtils; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerID; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; import org.apache.hadoop.test.GenericTestUtils; - import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; @@ -187,7 +185,7 @@ public class TestContainerMapping { containerName, containerOwner); mapping.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATE); + HddsProtos.LifeCycleEvent.CREATE); Thread.sleep(TIMEOUT + 1000); NavigableSet deleteContainers = mapping.getStateManager() @@ -195,13 +193,13 @@ public class TestContainerMapping { "OZONE", xceiverClientManager.getType(), xceiverClientManager.getFactor(), - HdslProtos.LifeCycleState.DELETING); + HddsProtos.LifeCycleState.DELETING); Assert.assertTrue(deleteContainers.contains(containerInfo.containerID())); thrown.expect(IOException.class); thrown.expectMessage("Lease Exception"); mapping.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATED); + HddsProtos.LifeCycleEvent.CREATED); } @Test @@ -282,7 +280,7 @@ public class TestContainerMapping { containerOwner, xceiverClientManager.getType(), xceiverClientManager.getFactor(), - HdslProtos.LifeCycleState.CLOSING); + HddsProtos.LifeCycleState.CLOSING); Assert.assertTrue( pendingCloseContainers.contains(updatedContainer.containerID())); } @@ -292,22 +290,22 @@ public class TestContainerMapping { String containerName = UUID.randomUUID().toString(); ContainerInfo info = createContainer(containerName); mapping.updateContainerState(containerName, - HdslProtos.LifeCycleEvent.FINALIZE); + HddsProtos.LifeCycleEvent.FINALIZE); NavigableSet pendingCloseContainers = mapping.getStateManager() .getMatchingContainerIDs( containerOwner, xceiverClientManager.getType(), xceiverClientManager.getFactor(), - HdslProtos.LifeCycleState.CLOSING); + HddsProtos.LifeCycleState.CLOSING); Assert.assertTrue(pendingCloseContainers.contains(info.containerID())); mapping.updateContainerState(containerName, - HdslProtos.LifeCycleEvent.CLOSE); + HddsProtos.LifeCycleEvent.CLOSE); NavigableSet closeContainers = mapping.getStateManager() 
.getMatchingContainerIDs( containerOwner, xceiverClientManager.getType(), xceiverClientManager.getFactor(), - HdslProtos.LifeCycleState.CLOSED); + HddsProtos.LifeCycleState.CLOSED); Assert.assertTrue(closeContainers.contains(info.containerID())); } @@ -326,9 +324,9 @@ public class TestContainerMapping { containerName, containerOwner); mapping.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATE); + HddsProtos.LifeCycleEvent.CREATE); mapping.updateContainerState(containerInfo.getContainerName(), - HdslProtos.LifeCycleEvent.CREATED); + HddsProtos.LifeCycleEvent.CREATED); return containerInfo; } diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/closer/TestContainerCloser.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java similarity index 83% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/closer/TestContainerCloser.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java index 4a797b2783a..2fec2324215 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/closer/TestContainerCloser.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java @@ -16,22 +16,24 @@ * */ -package org.apache.hadoop.ozone.scm.container.closer; +package org.apache.hadoop.hdds.scm.container.closer; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.container.ContainerMapping; +import org.apache.hadoop.hdds.scm.container.MockNodeManager; +import org.apache.hadoop.hdds.scm.container.TestContainerMapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.SCMTestUtils; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.ContainerMapping; -import org.apache.hadoop.ozone.scm.container.MockNodeManager; -import org.apache.hadoop.ozone.scm.container.TestContainerMapping; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Assert; @@ -42,11 +44,16 @@ import java.io.File; import java.io.IOException; import java.util.concurrent.TimeUnit; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CONTAINER_REPORT_INTERVAL; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleEvent.CREATE; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleEvent.CREATED; -import static 
org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_SIZE_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent + .CREATE; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent + .CREATED; +import static org.apache.hadoop.ozone.OzoneConfigKeys + .OZONE_CONTAINER_REPORT_INTERVAL; /** * Test class for Closing Container. @@ -88,8 +95,8 @@ public class TestContainerCloser { String containerName = "container-" + RandomStringUtils.randomNumeric(5); ContainerInfo info = mapping.allocateContainer( - HdslProtos.ReplicationType.STAND_ALONE, - HdslProtos.ReplicationFactor.ONE, containerName, "ozone"); + HddsProtos.ReplicationType.STAND_ALONE, + HddsProtos.ReplicationFactor.ONE, containerName, "ozone"); //Execute these state transitions so that we can close the container. mapping.updateContainerState(containerName, CREATE); @@ -136,8 +143,8 @@ public class TestContainerCloser { String containerName = "container-" + RandomStringUtils.randomNumeric(5); ContainerInfo info = mapping.allocateContainer( - HdslProtos.ReplicationType.STAND_ALONE, - HdslProtos.ReplicationFactor.ONE, containerName, "ozone"); + HddsProtos.ReplicationType.STAND_ALONE, + HddsProtos.ReplicationFactor.ONE, containerName, "ozone"); //Execute these state transitions so that we can close the container. mapping.updateContainerState(containerName, CREATE); @@ -182,8 +189,8 @@ public class TestContainerCloser { for (int x = 0; x < ContainerCloser.getCleanupWaterMark() + 10; x++) { String containerName = "container-" + RandomStringUtils.randomNumeric(7); ContainerInfo info = mapping.allocateContainer( - HdslProtos.ReplicationType.STAND_ALONE, - HdslProtos.ReplicationFactor.ONE, containerName, "ozone"); + HddsProtos.ReplicationType.STAND_ALONE, + HddsProtos.ReplicationFactor.ONE, containerName, "ozone"); mapping.updateContainerState(containerName, CREATE); mapping.updateContainerState(containerName, CREATED); sendContainerReport(info, 5 * GIGABYTE); diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/ContainerStates/TestContainerAttribute.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java similarity index 96% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/ContainerStates/TestContainerAttribute.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java index 33c8f39a90b..63cc9bfd789 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/container/ContainerStates/TestContainerAttribute.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java @@ -16,9 +16,10 @@ * */ -package org.apache.hadoop.ozone.scm.container.ContainerStates; +package org.apache.hadoop.hdds.scm.container.states; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.container.ContainerID; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestContainerPlacement.java 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java similarity index 84% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestContainerPlacement.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java index b0f47f589ab..ad50d971b31 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestContainerPlacement.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java @@ -16,28 +16,30 @@ * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.container.ContainerMapping; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .ContainerPlacementPolicy; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .SCMContainerPlacementCapacity; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMStorageReport; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.ContainerMapping; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.ContainerPlacementPolicy; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementCapacity; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.ozone.OzoneConfigKeys; +import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.PathUtils; import org.junit.Rule; @@ -50,10 +52,12 @@ import java.util.List; import java.util.UUID; import java.util.concurrent.TimeoutException; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DB_CACHE_SIZE_MB; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState .HEALTHY; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; diff --git 
a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java similarity index 97% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestNodeManager.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java index 6c821d31418..de6e30c9472 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestNodeManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java @@ -15,31 +15,29 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import com.google.common.base.Supplier; -import static java.util.concurrent.TimeUnit.*; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMStorageReport; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; -import org.apache.hadoop.scm.ScmConfigKeys; +import org.apache.hadoop.ozone.OzoneConfigKeys; +import org.apache.hadoop.ozone.protocol.commands.SCMCommand; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.PathUtils; - import org.hamcrest.CoreMatchers; -import org.junit.Assert; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; @@ -54,22 +52,24 @@ import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.DEAD; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState - .HEALTHY; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState - .STALE; -import static org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; - -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DEADNODE_INTERVAL; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_INTERVAL; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_DEADNODE_INTERVAL; +import static 
org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_HEARTBEAT_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_MAX_HB_COUNT_TO_PROCESS; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_STALENODE_INTERVAL; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE; +import static org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMCmdType; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.StringStartsWith.startsWith; @@ -729,7 +729,7 @@ public class TestNodeManager { * @return true if we found the expected number. */ private boolean findNodes(NodeManager nodeManager, int count, - HdslProtos.NodeState state) { + HddsProtos.NodeState state) { return count == nodeManager.getNodeCount(state); } diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestSCMNodePoolManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java similarity index 92% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestSCMNodePoolManager.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java index f12e8314033..8f412deddad 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/node/TestSCMNodePoolManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java @@ -16,19 +16,20 @@ * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.node; +package org.apache.hadoop.hdds.scm.node; import org.apache.commons.collections.ListUtils; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .ContainerPlacementPolicy; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .SCMContainerPlacementCapacity; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.ContainerPlacementPolicy; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementCapacity; -import org.apache.hadoop.scm.ScmConfigKeys; import org.apache.hadoop.test.PathUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java similarity index 95% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/package-info.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java index 0349f5d774e..da05c59acfc 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/scm/package-info.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm; +package org.apache.hadoop.hdds.scm; /** * SCM tests */ diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java similarity index 95% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java index 6f8cc2d915b..433beb8d51c 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java @@ -19,8 +19,26 @@ package org.apache.hadoop.ozone.container.common; import org.apache.commons.codec.digest.DigestUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.VersionInfo; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMStorageReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; @@ -36,47 +54,26 @@ import org.apache.hadoop.ozone.container.common.states.endpoint .RegisterEndpointTask; import org.apache.hadoop.ozone.container.common.states.endpoint .VersionEndpointTask; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMStorageReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.VersionInfo; import org.apache.hadoop.test.PathUtils; import 
org.apache.hadoop.util.Time; - -import static org.apache.hadoop.ozone.container.common.ContainerTestUtils - .createEndpoint; -import static org.apache.hadoop.ozone.scm.TestUtils.getDatanodeDetails; -import static org.hamcrest.Matchers.lessThanOrEqualTo; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; - import java.io.File; import java.net.InetSocketAddress; import java.util.UUID; +import static org.apache.hadoop.hdds.scm.TestUtils.getDatanodeDetails; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; -import static org.apache.hadoop.ozone.OzoneConfigKeys - .OZONE_METADATA_DIRS; -import static org.apache.hadoop.hdsl.protocol.proto +import static org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ReportState.states .noContainerReports; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS; +import static org.apache.hadoop.ozone.container.common.ContainerTestUtils + .createEndpoint; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * Tests the endpoints. @@ -231,7 +228,7 @@ public class TestEndPoint { new RegisterEndpointTask(rpcEndPoint, conf); if (!clearDatanodeDetails) { DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(); - HdslProtos.DatanodeDetailsProto datanodeDetailsProto = + HddsProtos.DatanodeDetailsProto datanodeDetailsProto = datanodeDetails.getProtoBufMessage(); endpointTask.setDatanodeDetailsProto(datanodeDetailsProto); } @@ -318,7 +315,7 @@ public class TestEndPoint { TestUtils.getDatanodeDetails(), conf); EndpointStateMachine rpcEndPoint = createEndpoint(conf, scmAddress, rpcTimeout)) { - HdslProtos.DatanodeDetailsProto datanodeDetailsProto = + HddsProtos.DatanodeDetailsProto datanodeDetailsProto = getDatanodeDetails().getProtoBufMessage(); rpcEndPoint.setState(EndpointStateMachine.EndPointStates.HEARTBEAT); diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java similarity index 90% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java index 2947789b0fe..0801c255a72 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java @@ -18,20 +18,23 @@ package org.apache.hadoop.ozone.container.placement; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.container.MockNodeManager; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .SCMContainerPlacementCapacity; +import org.apache.hadoop.hdds.scm.container.placement.algorithms + .SCMContainerPlacementRandom; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.ozone.scm.container.MockNodeManager; -import org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementCapacity; 
-import org.apache.hadoop.ozone.scm.container.placement.algorithms.SCMContainerPlacementRandom; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodeManager; import org.junit.Assert; import org.junit.Test; import java.util.List; import java.util.Random; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.HEALTHY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; import static org.junit.Assert.assertEquals; /** diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java similarity index 93% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java index ccc23c32243..7150d1b94f4 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java @@ -17,8 +17,8 @@ package org.apache.hadoop.ozone.container.placement; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java similarity index 91% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java index a35d4d4ed8b..8eb07e61c24 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java @@ -17,28 +17,30 @@ package org.apache.hadoop.ozone.container.replication; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.container.testutils.ReplicationDatanodeStateManager; -import org.apache.hadoop.ozone.container.testutils.ReplicationNodeManagerMock; -import org.apache.hadoop.ozone.container.testutils.ReplicationNodePoolManagerMock; -import org.apache.hadoop.ozone.container.common.SCMTestUtils; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto +import org.apache.hadoop.hdds.scm.TestUtils; +import org.apache.hadoop.hdds.scm.container.replication.ContainerSupervisor; +import org.apache.hadoop.hdds.scm.container.replication.InProgressPool; +import org.apache.hadoop.hdds.scm.node.CommandQueue; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; +import 
org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.TestUtils; -import org.apache.hadoop.ozone.scm.container.replication.ContainerSupervisor; -import org.apache.hadoop.ozone.scm.container.replication.InProgressPool; -import org.apache.hadoop.ozone.scm.node.CommandQueue; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; +import org.apache.hadoop.ozone.container.common.SCMTestUtils; +import org.apache.hadoop.ozone.container.testutils + .ReplicationDatanodeStateManager; +import org.apache.hadoop.ozone.container.testutils.ReplicationNodeManagerMock; +import org.apache.hadoop.ozone.container.testutils + .ReplicationNodePoolManagerMock; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils.LogCapturer; -import org.slf4j.event.Level; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.event.Level; import java.io.IOException; import java.util.HashMap; @@ -48,11 +50,12 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.HEALTHY; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT; -import static org.apache.hadoop.scm.ScmConfigKeys +import static org.apache.hadoop.hdds.scm.ScmConfigKeys .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; import static org.apache.ratis.shaded.com.google.common.util.concurrent .Uninterruptibles.sleepUninterruptibly; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java similarity index 100% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java similarity index 87% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java index 55b8e6ddb34..26f35144975 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java @@ -17,17 +17,20 @@ package org.apache.hadoop.ozone.container.testutils; import org.apache.commons.codec.digest.DigestUtils; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.ozone.scm.node.NodeManager; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerInfo; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; import java.util.LinkedList; import java.util.List; import java.util.Random; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState.HEALTHY; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState + .HEALTHY; /** * This class manages the state of datanode diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java similarity index 91% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java index f8a64320fef..f2db75104f0 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java @@ -17,31 +17,30 @@ package org.apache.hadoop.ozone.container.testutils; import com.google.common.base.Preconditions; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric; +import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat; +import org.apache.hadoop.hdds.scm.node.CommandQueue; +import org.apache.hadoop.hdds.scm.node.NodeManager; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.ReportState; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMNodeReport; +import org.apache.hadoop.hdds.protocol.proto + .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; import org.apache.hadoop.ozone.protocol.VersionResponse; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeMetric; -import 
org.apache.hadoop.ozone.scm.container.placement.metrics.SCMNodeStat; -import org.apache.hadoop.ozone.scm.node.CommandQueue; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; -import org.apache.hadoop.ozone.scm.node.NodeManager; +import org.mockito.Mockito; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.UUID; -import org.mockito.Mockito; - /** * A Node Manager to test replication. */ @@ -284,7 +283,7 @@ public class ReplicationNodeManagerMock implements NodeManager { * @return SCMHeartbeatResponseProto */ @Override - public SCMCommand register(HdslProtos.DatanodeDetailsProto dd) { + public SCMCommand register(HddsProtos.DatanodeDetailsProto dd) { return null; } @@ -297,7 +296,7 @@ public class ReplicationNodeManagerMock implements NodeManager { * @return SCMheartbeat response list */ @Override - public List sendHeartbeat(HdslProtos.DatanodeDetailsProto dd, + public List sendHeartbeat(HddsProtos.DatanodeDetailsProto dd, SCMNodeReport nodeReport, ReportState containerReportState) { return null; } diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java similarity index 95% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java index 766a882799f..ffcd752e84a 100644 --- a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java @@ -16,9 +16,9 @@ */ package org.apache.hadoop.ozone.container.testutils; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.ozone.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.node.NodePoolManager; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import java.io.IOException; import java.util.ArrayList; diff --git a/hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java similarity index 100% rename from hadoop-hdsl/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java rename to hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java diff --git a/hadoop-hdsl/tools/pom.xml b/hadoop-hdds/tools/pom.xml similarity index 84% rename from hadoop-hdsl/tools/pom.xml rename to hadoop-hdds/tools/pom.xml index 7a5eb0a00b8..b2823ef1804 100644 --- a/hadoop-hdsl/tools/pom.xml +++ b/hadoop-hdds/tools/pom.xml @@ -19,29 +19,30 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.apache.hadoop - hadoop-hdsl + hadoop-hdds 3.2.0-SNAPSHOT - hadoop-hdsl-tools + + hadoop-hdds-tools 3.2.0-SNAPSHOT - Apache Hadoop Hdsl Tools - Apache Hadoop HDSL tools + Apache HDDS Tools + Apache Hadoop HDDS tools jar - hdsl + hdds true org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided org.apache.hadoop - hadoop-hdsl-client + hadoop-hdds-client provided diff --git 
a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneBaseCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java similarity index 97% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneBaseCLI.java rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java index eeec3b6459d..727c81a0d83 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneBaseCLI.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; diff --git a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneCommandHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java similarity index 96% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneCommandHandler.java rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java index 1455c131e15..f9b8fcd6350 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/OzoneCommandHandler.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java @@ -15,10 +15,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; import org.apache.commons.cli.CommandLine; -import org.apache.hadoop.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.client.ScmClient; import java.io.IOException; import java.io.PrintStream; diff --git a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/ResultCode.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java similarity index 96% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/ResultCode.java rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java index 7b8704aa7f3..27df88cf825 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/ResultCode.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; /** * The possible result code of SCM CLI. diff --git a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SCMCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java similarity index 84% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SCMCLI.java rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java index c3a1a1d64e3..34553ed371d 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SCMCLI.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; import org.apache.commons.cli.BasicParser; import org.apache.commons.cli.CommandLine; @@ -23,18 +23,20 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import org.apache.hadoop.hdds.scm.cli.container.ContainerCommandHandler; +import org.apache.hadoop.hdds.scm.cli.container.CreateContainerHandler; +import org.apache.hadoop.hdds.scm.client.ContainerOperationClient; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.protocolPB + .StorageContainerLocationProtocolClientSideTranslatorPB; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.ozone.scm.cli.container.ContainerCommandHandler; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.client.ContainerOperationClient; -import org.apache.hadoop.scm.client.ScmClient; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ToolRunner; @@ -44,13 +46,14 @@ import java.net.InetSocketAddress; import java.net.URISyntaxException; import java.util.Arrays; -import static org.apache.hadoop.hdsl.HdslUtils.getScmAddressForClients; -import static org.apache.hadoop.ozone.scm.cli.ResultCode.EXECUTION_ERROR; -import static org.apache.hadoop.ozone.scm.cli.ResultCode.SUCCESS; -import static org.apache.hadoop.ozone.scm.cli.ResultCode.UNRECOGNIZED_CMD; -import static org.apache.hadoop.ozone.scm.cli.container.ContainerCommandHandler.CONTAINER_CMD; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT; -import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_SIZE_DEFAULT; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys + .OZONE_SCM_CONTAINER_SIZE_GB; +import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients; +import static org.apache.hadoop.hdds.scm.cli.ResultCode.EXECUTION_ERROR; +import static org.apache.hadoop.hdds.scm.cli.ResultCode.SUCCESS; +import static org.apache.hadoop.hdds.scm.cli.ResultCode.UNRECOGNIZED_CMD; /** * This class is the CLI of SCM. @@ -126,7 +129,7 @@ public class SCMCLI extends OzoneBaseCLI { * of commands, the options are added in a cascading manner, e.g.: * {@link SCMCLI} asks {@link ContainerCommandHandler} to add it's options, * which then asks it's sub command, such as - * {@link org.apache.hadoop.ozone.scm.cli.container.CreateContainerHandler} + * {@link CreateContainerHandler} * to add it's own options. 
* * We need to do this because {@link BasicParser} need to take all the options @@ -146,7 +149,7 @@ public class SCMCLI extends OzoneBaseCLI { private static void addTopLevelOptions(Options options) { Option containerOps = new Option( - CONTAINER_CMD, false, "Container related options"); + ContainerCommandHandler.CONTAINER_CMD, false, "Container related options"); options.addOption(containerOps); // TODO : add pool, node and pipeline commands. } @@ -202,7 +205,7 @@ public class SCMCLI extends OzoneBaseCLI { throws IOException, URISyntaxException { OzoneCommandHandler handler = null; try { - if (cmd.hasOption(CONTAINER_CMD)) { + if (cmd.hasOption(ContainerCommandHandler.CONTAINER_CMD)) { handler = new ContainerCommandHandler(scmClient); } diff --git a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/container/CloseContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java similarity index 84% rename from hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/container/CloseContainerHandler.java rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java index 7a523053734..ba420230fe3 100644 --- a/hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/container/CloseContainerHandler.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java @@ -15,21 +15,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli.container; +package org.apache.hadoop.hdds.scm.cli.container; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.apache.hadoop.ozone.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.scm.client.ScmClient; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; import java.io.IOException; -import static org.apache.hadoop.ozone.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.ozone.scm.cli.SCMCLI.HELP_OP; - /** * The handler of close container command. */ @@ -45,7 +43,7 @@ public class CloseContainerHandler extends OzoneCommandHandler { } if (!cmd.hasOption(OPT_CONTAINER_NAME)) { displayHelp(); - if (!cmd.hasOption(HELP_OP)) { + if (!cmd.hasOption(SCMCLI.HELP_OP)) { throw new IOException("Expecting container name"); } else { return; @@ -68,7 +66,7 @@ public class CloseContainerHandler extends OzoneCommandHandler { Options options = new Options(); addOptions(options); HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -close
* Based on the Datanode http serer. */ @@ -110,11 +112,11 @@ public class ObjectStoreRestHttpServer implements Closeable { }); this.httpServer.childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, - conf.getInt(ScmConfigKeys.HDSL_REST_NETTY_HIGH_WATERMARK, - ScmConfigKeys.HDSL_REST_NETTY_HIGH_WATERMARK_DEFAULT)); + conf.getInt(ScmConfigKeys.HDDS_REST_NETTY_HIGH_WATERMARK, + ScmConfigKeys.HDDS_REST_NETTY_HIGH_WATERMARK_DEFAULT)); this.httpServer.childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, - conf.getInt(ScmConfigKeys.HDSL_REST_NETTY_LOW_WATERMARK, - ScmConfigKeys.HDSL_REST_NETTY_LOW_WATERMARK_DEFAULT)); + conf.getInt(ScmConfigKeys.HDDS_REST_NETTY_LOW_WATERMARK, + ScmConfigKeys.HDDS_REST_NETTY_LOW_WATERMARK_DEFAULT)); if (externalHttpChannel == null) { httpServer.channel(NioServerSocketChannel.class); @@ -141,8 +143,8 @@ public class ObjectStoreRestHttpServer implements Closeable { if (httpServer != null) { InetSocketAddress infoAddr = NetUtils.createSocketAddr( - conf.getTrimmed(HDSL_REST_HTTP_ADDRESS_KEY, - HDSL_REST_HTTP_ADDRESS_DEFAULT)); + conf.getTrimmed(HDDS_REST_HTTP_ADDRESS_KEY, + HDDS_REST_HTTP_ADDRESS_DEFAULT)); ChannelFuture f = httpServer.bind(infoAddr); try { @@ -156,7 +158,7 @@ public class ObjectStoreRestHttpServer implements Closeable { } } httpAddress = (InetSocketAddress) f.channel().localAddress(); - LOG.info("Listening HDSL REST traffic on " + httpAddress); + LOG.info("Listening HDDS REST traffic on " + httpAddress); } } @@ -183,8 +185,8 @@ public class ObjectStoreRestHttpServer implements Closeable { */ private static RestCsrfPreventionFilter createRestCsrfPreventionFilter( Configuration conf) { - if (!conf.getBoolean(HDSL_REST_CSRF_ENABLED_KEY, - HDSL_REST_CSRF_ENABLED_DEFAULT)) { + if (!conf.getBoolean(HDDS_REST_CSRF_ENABLED_KEY, + HDDS_REST_CSRF_ENABLED_DEFAULT)) { return null; } String restCsrfClassName = RestCsrfPreventionFilter.class.getName(); diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java index c90cc844e39..0f4a85628b9 100644 --- a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java +++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java @@ -29,8 +29,8 @@ import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs; import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession; import org.apache.hadoop.ozone.ksm.protocolPB .KeySpaceManagerProtocolClientSideTranslatorPB; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.OzoneConsts.Versioning; import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream; @@ -41,9 +41,9 @@ import org.apache.hadoop.ozone.protocolPB.KSMPBHelper; import org.apache.hadoop.ozone.ksm.KSMConfigKeys; import org.apache.hadoop.ozone.OzoneAcl; import org.apache.hadoop.ozone.web.request.OzoneQuota; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.apache.hadoop.scm.XceiverClientManager; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.XceiverClientManager; +import 
org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; import org.apache.hadoop.ozone.client.rest.OzoneException; import org.apache.hadoop.ozone.web.handlers.BucketArgs; @@ -84,8 +84,8 @@ public final class DistributedStorageHandler implements StorageHandler { private final OzoneAcl.OzoneACLRights groupRights; private int chunkSize; private final boolean useRatis; - private final HdslProtos.ReplicationType type; - private final HdslProtos.ReplicationFactor factor; + private final HddsProtos.ReplicationType type; + private final HddsProtos.ReplicationFactor factor; /** * Creates a new DistributedStorageHandler. @@ -107,11 +107,11 @@ public final class DistributedStorageHandler implements StorageHandler { ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT); if(useRatis) { - type = HdslProtos.ReplicationType.RATIS; - factor = HdslProtos.ReplicationFactor.THREE; + type = HddsProtos.ReplicationType.RATIS; + factor = HddsProtos.ReplicationFactor.THREE; } else { - type = HdslProtos.ReplicationType.STAND_ALONE; - factor = HdslProtos.ReplicationFactor.ONE; + type = HddsProtos.ReplicationType.STAND_ALONE; + factor = HddsProtos.ReplicationFactor.ONE; } chunkSize = conf.getInt(ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY, diff --git a/hadoop-ozone/ozone-manager/pom.xml b/hadoop-ozone/ozone-manager/pom.xml index b0bc73dc962..9b8f42c1438 100644 --- a/hadoop-ozone/ozone-manager/pom.xml +++ b/hadoop-ozone/ozone-manager/pom.xml @@ -101,7 +101,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework ${project.build.directory}/ webapps/static/**/*.* diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java index 2e1fbd37c96..bf223324ae6 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java @@ -19,7 +19,7 @@ package org.apache.hadoop.ozone.ksm; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.server.ServiceRuntimeInfo; +import org.apache.hadoop.hdds.server.ServiceRuntimeInfo; /** * This is the JMX management interface for ksm information. 
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java index f241de67697..fa0eaa2acb8 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java @@ -27,7 +27,7 @@ import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup; import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs; import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo; import org.apache.hadoop.ozone.common.BlockGroup; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.ksm.exceptions.KSMException; import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes; @@ -65,7 +65,7 @@ import static org.apache.hadoop.ozone.ksm.KSMConfigKeys .OZONE_KSM_DB_CACHE_SIZE_DEFAULT; import static org.apache.hadoop.ozone.ksm.KSMConfigKeys .OZONE_KSM_DB_CACHE_SIZE_MB; -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; /** * KSM metadata manager interface. diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java index d444e5c38d1..015bed69864 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java @@ -21,12 +21,12 @@ import java.io.IOException; import java.util.Properties; import java.util.UUID; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.common.Storage; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType; import static org.apache.hadoop.ozone.OzoneConsts.SCM_ID; -import static org.apache.hadoop.hdsl.server.ServerUtils.getOzoneMetaDirPath; +import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; /** * KSMStorage is responsible for management of the StorageDirectories used by diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java index 3beaed49132..14fb69c35f8 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java @@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.ksm; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; import org.apache.hadoop.ozone.common.BlockGroup; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BackgroundService; import org.apache.hadoop.utils.BackgroundTask; diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java index 95f4c08d6f8..70ba178ba3d 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java @@ -24,15 +24,15 @@ import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs; import org.apache.hadoop.ozone.common.BlockGroup; import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo; import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.ksm.exceptions.KSMException; import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes; import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup; import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession; import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.KeyInfo; -import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BackgroundService; import org.apache.hadoop.utils.BatchOperation; @@ -71,10 +71,10 @@ import static org.apache.hadoop.ozone .OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_DEFAULT; import static org.apache.hadoop.ozone .OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_IN_MB; -import org.apache.hadoop.hdsl.protocol - .proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol - .proto.HdslProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol + .proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.protocol + .proto.HddsProtos.ReplicationFactor; /** diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java index fdec56c1dde..39ef396378f 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.hdsl.server.ServiceRuntimeInfoImpl; +import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl; import org.apache.hadoop.ozone.common.Storage.StorageState; import org.apache.hadoop.ozone.ksm.exceptions.KSMException; import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs; @@ -42,32 +42,32 @@ import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos .ServicePort; import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.OzoneAclInfo; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.ozone.protocolPB 
.KeySpaceManagerProtocolServerSideTranslatorPB; -import org.apache.hadoop.scm.ScmInfo; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; -import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.scm.protocolPB .ScmBlockLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.protocolPB.ScmBlockLocationProtocolPB; -import org.apache.hadoop.scm.protocolPB +import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB; +import org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; -import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB; +import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.StringUtils; -import static org.apache.hadoop.hdsl.HdslUtils.getScmAddressForBlockClients; -import static org.apache.hadoop.hdsl.HdslUtils.getScmAddressForClients; -import static org.apache.hadoop.hdsl.HdslUtils.isHdslEnabled; +import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForBlockClients; +import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients; +import static org.apache.hadoop.hdds.HddsUtils.isHddsEnabled; import static org.apache.hadoop.ozone.KsmUtils.getKsmAddress; -import static org.apache.hadoop.hdsl.server.ServerUtils +import static org.apache.hadoop.hdds.server.ServerUtils .updateRPCListenAddress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -92,7 +92,7 @@ import static org.apache.hadoop.ozone.ksm.KSMConfigKeys import static org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.KeySpaceManagerService .newReflectiveBlockingService; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos .NodeState.HEALTHY; import static org.apache.hadoop.util.ExitUtil.terminate; @@ -338,7 +338,7 @@ public final class KeySpaceManager extends ServiceRuntimeInfoImpl public static KeySpaceManager createKSM(String[] argv, OzoneConfiguration conf) throws IOException { - if (!isHdslEnabled(conf)) { + if (!isHddsEnabled(conf)) { System.err.println("KSM cannot be started in secure mode or when " + OZONE_ENABLED + " is set to false"); System.exit(1); @@ -840,7 +840,7 @@ public final class KeySpaceManager extends ServiceRuntimeInfoImpl // When we implement multi-home this call has to be handled properly. 
List services = new ArrayList<>(); ServiceInfo.Builder ksmServiceInfoBuilder = ServiceInfo.newBuilder() - .setNodeType(HdslProtos.NodeType.KSM) + .setNodeType(HddsProtos.NodeType.KSM) .setHostname(ksmRpcAddress.getHostName()) .addServicePort(ServicePort.newBuilder() .setType(ServicePort.Type.RPC) @@ -865,22 +865,22 @@ public final class KeySpaceManager extends ServiceRuntimeInfoImpl InetSocketAddress scmAddr = getScmAddressForClients( configuration); ServiceInfo.Builder scmServiceInfoBuilder = ServiceInfo.newBuilder() - .setNodeType(HdslProtos.NodeType.SCM) + .setNodeType(HddsProtos.NodeType.SCM) .setHostname(scmAddr.getHostName()) .addServicePort(ServicePort.newBuilder() .setType(ServicePort.Type.RPC) .setValue(scmAddr.getPort()).build()); services.add(scmServiceInfoBuilder.build()); - List nodes = scmContainerClient.queryNode( - EnumSet.of(HEALTHY), HdslProtos.QueryScope.CLUSTER, "") + List nodes = scmContainerClient.queryNode( + EnumSet.of(HEALTHY), HddsProtos.QueryScope.CLUSTER, "") .getNodesList(); - for (HdslProtos.Node node : nodes) { - HdslProtos.DatanodeDetailsProto datanode = node.getNodeID(); + for (HddsProtos.Node node : nodes) { + HddsProtos.DatanodeDetailsProto datanode = node.getNodeID(); ServiceInfo.Builder dnServiceInfoBuilder = ServiceInfo.newBuilder() - .setNodeType(HdslProtos.NodeType.DATANODE) + .setNodeType(HddsProtos.NodeType.DATANODE) .setHostname(datanode.getHostName()); dnServiceInfoBuilder.addServicePort(ServicePort.newBuilder() diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java index 3cf43608f3e..478804b32c5 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java @@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.ksm; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; -import org.apache.hadoop.hdsl.server.BaseHttpServer; +import org.apache.hadoop.hdds.server.BaseHttpServer; import java.io.IOException; diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java index 7f60bf83c84..7a2d7cc9ca0 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java @@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.ksm; import org.apache.hadoop.ozone.common.BlockGroup; import org.apache.hadoop.ozone.common.DeleteBlockGroupResult; -import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol; +import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol; import org.apache.hadoop.utils.BackgroundService; import org.apache.hadoop.utils.BackgroundTask; import org.apache.hadoop.utils.BackgroundTaskQueue; diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java index 4561c435eb3..cc2f78aa34d 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java +++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.ksm; import com.google.common.base.Preconditions; import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.ksm.exceptions.KSMException; import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.OzoneAclInfo; @@ -26,7 +26,7 @@ import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.VolumeList; import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.VolumeInfo; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.util.Time; import org.apache.hadoop.utils.BatchOperation; import org.slf4j.Logger; @@ -137,9 +137,9 @@ public class VolumeManagerImpl implements VolumeManager { BatchOperation batch = new BatchOperation(); // Write the vol info - List metadataList = new LinkedList<>(); + List metadataList = new LinkedList<>(); for (Map.Entry entry : args.getKeyValueMap().entrySet()) { - metadataList.add(HdslProtos.KeyValue.newBuilder() + metadataList.add(HddsProtos.KeyValue.newBuilder() .setKey(entry.getKey()).setValue(entry.getValue()).build()); } List aclList = args.getAclMap().ozoneAclGetProtobuf(); diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java index d4bc10f4554..02a4120d776 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java @@ -98,7 +98,7 @@ import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.ServiceListRequest; import org.apache.hadoop.ozone.protocol.proto .KeySpaceManagerProtocolProtos.ServiceListResponse; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -322,9 +322,9 @@ public class KeySpaceManagerProtocolServerSideTranslatorPB implements LocateKeyResponse.newBuilder(); try { KeyArgs keyArgs = request.getKeyArgs(); - HdslProtos.ReplicationType type = + HddsProtos.ReplicationType type = keyArgs.hasType()? keyArgs.getType() : null; - HdslProtos.ReplicationFactor factor = + HddsProtos.ReplicationFactor factor = keyArgs.hasFactor()? keyArgs.getFactor() : null; KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder() .setVolumeName(keyArgs.getVolumeName()) @@ -473,9 +473,9 @@ public class KeySpaceManagerProtocolServerSideTranslatorPB implements CommitKeyResponse.newBuilder(); try { KeyArgs keyArgs = request.getKeyArgs(); - HdslProtos.ReplicationType type = + HddsProtos.ReplicationType type = keyArgs.hasType()? keyArgs.getType() : null; - HdslProtos.ReplicationFactor factor = + HddsProtos.ReplicationFactor factor = keyArgs.hasFactor()? 
keyArgs.getFactor() : null; KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder() .setVolumeName(keyArgs.getVolumeName()) @@ -501,9 +501,9 @@ public class KeySpaceManagerProtocolServerSideTranslatorPB implements AllocateBlockResponse.newBuilder(); try { KeyArgs keyArgs = request.getKeyArgs(); - HdslProtos.ReplicationType type = + HddsProtos.ReplicationType type = keyArgs.hasType()? keyArgs.getType() : null; - HdslProtos.ReplicationFactor factor = + HddsProtos.ReplicationFactor factor = keyArgs.hasFactor()? keyArgs.getFactor() : null; KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder() .setVolumeName(keyArgs.getVolumeName()) diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java index 82b3602f982..8553255d19e 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java @@ -26,7 +26,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.client.rest.OzoneException; import org.apache.hadoop.ozone.web.ozShell.bucket.UpdateBucketHandler; import org.apache.hadoop.ozone.web.ozShell.keys.DeleteKeyHandler; diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java index 7f1b8ca0ab3..de4a85ac46b 100644 --- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java +++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java @@ -19,7 +19,7 @@ package org.apache.hadoop.ozone.ksm; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream; import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream; -import org.apache.hadoop.scm.storage.ChunkInputStream; +import org.apache.hadoop.hdds.scm.storage.ChunkInputStream; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java index 5e24167eb6b..b263df56a1e 100644 --- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java +++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpConfig.Policy; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml index 6878f3c93c4..40c50becf86 100644 --- a/hadoop-ozone/pom.xml +++ b/hadoop-ozone/pom.xml @@ -56,32 +56,32 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - 
hadoop-hdsl-common + hadoop-hdds-common provided org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework provided org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm provided org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service provided org.apache.hadoop - hadoop-hdsl-client + hadoop-hdds-client provided org.apache.hadoop - hadoop-hdsl-tools + hadoop-hdds-tools provided @@ -98,13 +98,13 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm test test-jar org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service test test-jar diff --git a/hadoop-ozone/tools/pom.xml b/hadoop-ozone/tools/pom.xml index 591960d62ab..573afcff433 100644 --- a/hadoop-ozone/tools/pom.xml +++ b/hadoop-ozone/tools/pom.xml @@ -29,7 +29,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> jar - hdsl + hdds true @@ -51,11 +51,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common org.openjdk.jmh diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java index adcff4f7b36..d933e6f2e70 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java @@ -34,8 +34,11 @@ import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang.time.DurationFormatUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.hdds.client.OzoneQuota; +import org.apache.hadoop.hdds.client.ReplicationFactor; +import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.client.*; import org.apache.hadoop.ozone.client.io.OzoneInputStream; diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java index c20fdeb202e..8a6e6347ed7 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java @@ -19,12 +19,14 @@ package org.apache.hadoop.ozone.genesis; import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerStateMap; -import org.apache.hadoop.ozone.scm.exceptions.SCMException; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import 
org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.states.ContainerStateMap; +import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.apache.hadoop.util.Time; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Level; @@ -40,10 +42,10 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.OPEN; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.CLOSED; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.OPEN; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; @State(Scope.Thread) public class BenchMarkContainerStateMap { diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java index dfd33f7bbbb..93b22f97220 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java @@ -6,16 +6,16 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.server.datanode.StorageLocation; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl; import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl; import org.apache.hadoop.ozone.container.common.impl.Dispatcher; import org.apache.hadoop.ozone.container.common.impl.KeyManagerImpl; import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.scm.container.common.helpers.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.PipelineChannel; +import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Level; import org.openjdk.jmh.annotations.Scope; @@ -33,18 +33,18 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.CreateContainerRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ReadChunkRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.WriteChunkRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.PutKeyRequestProto; -import 
org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.GetKeyRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.CreateContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ReadChunkRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.WriteChunkRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData; -import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor; +import org.apache.hadoop.hdds.protocol.proto.ContainerProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; @State(Scope.Benchmark) public class BenchMarkDatanodeDispatcher { diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java index 7c3ef3c05b3..e77cb2276c8 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java @@ -24,7 +24,7 @@ import org.openjdk.jmh.runner.options.Options; import org.openjdk.jmh.runner.options.OptionsBuilder; /** - * Main class that executes a set of HDSL/Ozone benchmarks. + * Main class that executes a set of HDDS/Ozone benchmarks. * We purposefully don't use the runner and tools classes from Hadoop. * There are some name collisions with OpenJDK JMH package. *
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java index 78b579d9945..199ead58ea6 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java @@ -3,7 +3,7 @@ package org.apache.hadoop.ozone.genesis; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.StorageUnit; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.utils.MetadataStore; import org.apache.hadoop.utils.MetadataStoreBuilder; diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java index bd0c2ccfe1b..c9b44e318dc 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; import com.google.common.base.Preconditions; import org.apache.commons.cli.BasicParser; @@ -28,16 +28,16 @@ import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtilClient; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.OzoneAclInfo; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketInfo; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyInfo; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeInfo; import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeList; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.Pipeline; -import org.apache.hadoop.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos.Pipeline; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.utils.MetadataStore; @@ -503,7 +503,7 @@ public class SQLCLI extends Configured implements Tool { String containerName = new String(key, encoding); ContainerInfo containerInfo = null; containerInfo = ContainerInfo.fromProtobuf( - HdslProtos.SCMContainerInfo.PARSER.parseFrom(value)); + HddsProtos.SCMContainerInfo.PARSER.parseFrom(value)); Preconditions.checkNotNull(containerInfo); try { //TODO: include container state to sqllite schema @@ -533,7 +533,7 @@ public class SQLCLI extends Configured implements Tool { pipeline.getPipelineChannel().getLeaderID()); executeSQL(conn, insertContainerInfo); - for (HdslProtos.DatanodeDetailsProto dd : + for (HddsProtos.DatanodeDetailsProto dd : 
pipeline.getPipelineChannel().getMembersList()) { String uuid = dd.getUuid(); if (!uuidChecked.contains(uuid)) { @@ -629,7 +629,7 @@ public class SQLCLI extends Configured implements Tool { dbStore.iterate(null, (key, value) -> { DatanodeDetails nodeId = DatanodeDetails - .getFromProtoBuf(HdslProtos.DatanodeDetailsProto + .getFromProtoBuf(HddsProtos.DatanodeDetailsProto .PARSER.parseFrom(key)); String blockPool = DFSUtil.bytes2String(value); try { diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java index 4c38ae0bd20..fbef0d2ec6b 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.ozone.scm.cli; +package org.apache.hadoop.hdds.scm.cli; /** * Command line helpers for scm management. diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index e5cd338bbeb..6e94e010566 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -569,18 +569,18 @@ org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common ${project.version} org.apache.hadoop - hadoop-hdsl-client + hadoop-hdds-client ${project.version} org.apache.hadoop - hadoop-hdsl-tools + hadoop-hdds-tools ${project.version} @@ -610,31 +610,31 @@ org.apache.hadoop - hadoop-hdsl-server-framework + hadoop-hdds-server-framework ${project.version} org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm ${project.version} org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service ${project.version} org.apache.hadoop - hadoop-hdsl-container-service + hadoop-hdds-container-service ${project.version} test-jar org.apache.hadoop - hadoop-hdsl-server-scm + hadoop-hdds-server-scm test-jar ${project.version} diff --git a/hadoop-tools/hadoop-ozone/pom.xml b/hadoop-tools/hadoop-ozone/pom.xml index a58b8755a9f..2464c32c1e9 100644 --- a/hadoop-tools/hadoop-ozone/pom.xml +++ b/hadoop-tools/hadoop-ozone/pom.xml @@ -82,7 +82,7 @@ org.apache.hadoop - hadoop-hdsl-common + hadoop-hdds-common provided diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java index 081d50d331b..c2a2fe2c066 100644 --- a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java +++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java @@ -50,8 +50,8 @@ import org.apache.hadoop.ozone.client.OzoneClientFactory; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.client.OzoneKey; import org.apache.hadoop.ozone.client.OzoneVolume; -import org.apache.hadoop.ozone.client.ReplicationFactor; -import org.apache.hadoop.ozone.client.ReplicationType; +import org.apache.hadoop.hdds.client.ReplicationFactor; +import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.http.client.utils.URIBuilder; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java index 
848ddff02a2..6be9f919ccd 100644 --- a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java +++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler; -import org.apache.hadoop.hdsl.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.MiniOzoneClassicCluster; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.OzoneConsts; diff --git a/hadoop-tools/hadoop-ozone/src/todo/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java b/hadoop-tools/hadoop-ozone/src/todo/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java index 85120c38dd4..97ec3f46142 100644 --- a/hadoop-tools/hadoop-ozone/src/todo/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java +++ b/hadoop-tools/hadoop-ozone/src/todo/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java @@ -36,7 +36,7 @@ import org.apache.hadoop.ozone.web.handlers.VolumeArgs; import org.apache.hadoop.ozone.web.interfaces.StorageHandler; import org.apache.hadoop.ozone.web.utils.OzoneUtils; import org.apache.hadoop.ozone.ksm.KSMConfigKeys; -import org.apache.hadoop.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.junit.Assert; import java.io.IOException; diff --git a/hadoop-tools/hadoop-tools-dist/pom.xml b/hadoop-tools/hadoop-tools-dist/pom.xml index 83ded3ee5e9..21cc7cef8c5 100644 --- a/hadoop-tools/hadoop-tools-dist/pom.xml +++ b/hadoop-tools/hadoop-tools-dist/pom.xml @@ -194,7 +194,7 @@ - hdsl + hdds false diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml index 7d99762f185..f421e580bae 100644 --- a/hadoop-tools/pom.xml +++ b/hadoop-tools/pom.xml @@ -69,7 +69,7 @@ - hdsl + hdds false diff --git a/pom.xml b/pom.xml index 7c633463644..66b173cd6f9 100644 --- a/pom.xml +++ b/pom.xml @@ -386,7 +386,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs **/build/** **/patchprocess/** **/*.js - hadoop-hdsl/**/nvd3-* + hadoop-hdds/**/nvd3-* @@ -589,7 +589,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs - hdsl-src + hdds-src false @@ -609,12 +609,12 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs false false - hadoop-${project.version}-src-with-hdsl + hadoop-${project.version}-src-with-hdds hadoop-dist/target - hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdsl.xml + hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdds.xml @@ -634,7 +634,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs - Hadoop source tar (including HDSL) available at: ${basedir}/hadoop-dist/target/hadoop-${project.version}-src-with-hdsl.tar.gz + Hadoop source tar (including HDDS) available at: ${basedir}/hadoop-dist/target/hadoop-${project.version}-src-with-hdds.tar.gz @@ -741,14 +741,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs - hdsl + hdds false hadoop-ozone hadoop-cblock - hadoop-hdsl + hadoop-hdds hadoop-ozone/acceptance-test