From f6fa5bd1aa085a4d22f3450b545bb70063da9f51 Mon Sep 17 00:00:00 2001
From: slfan1989 <55643692+slfan1989@users.noreply.github.com>
Date: Wed, 18 May 2022 04:12:04 -0700
Subject: [PATCH] HADOOP-18229. Fix Hadoop-Common JavaDoc Errors (#4292)
Contributed by slfan1989
---
hadoop-common-project/hadoop-common/pom.xml | 10 +
.../apache/hadoop/conf/ConfigRedactor.java | 4 +-
.../org/apache/hadoop/conf/Configuration.java | 52 ++-
.../org/apache/hadoop/conf/Configured.java | 4 +-
.../apache/hadoop/conf/Reconfigurable.java | 6 +
.../hadoop/conf/ReconfigurableBase.java | 3 +
.../hadoop/conf/ReconfigurationException.java | 10 +
.../conf/ReconfigurationTaskStatus.java | 4 +-
.../org/apache/hadoop/crypto/CryptoCodec.java | 10 +-
.../hadoop/crypto/CryptoInputStream.java | 2 +-
.../hadoop/crypto/CryptoOutputStream.java | 2 +-
.../hadoop/crypto/CryptoStreamUtils.java | 35 +-
.../apache/hadoop/crypto/OpensslCipher.java | 23 +-
.../apache/hadoop/crypto/key/KeyProvider.java | 48 +--
.../key/KeyProviderCryptoExtension.java | 9 +-
.../KeyProviderDelegationTokenExtension.java | 4 +-
.../apache/hadoop/crypto/key/KeyShell.java | 4 +-
.../hadoop/crypto/key/kms/ValueQueue.java | 12 +-
.../apache/hadoop/fs/AbstractFileSystem.java | 277 +++++++++++++--
.../org/apache/hadoop/fs/AvroFSInput.java | 13 +-
.../hadoop/fs/BatchedRemoteIterator.java | 3 +
.../org/apache/hadoop/fs/BlockLocation.java | 45 +++
.../org/apache/hadoop/fs/ByteBufferUtil.java | 6 +
.../apache/hadoop/fs/CachingGetSpaceUsed.java | 9 +
.../apache/hadoop/fs/ChecksumFileSystem.java | 40 ++-
.../java/org/apache/hadoop/fs/ChecksumFs.java | 37 +-
.../fs/CommonConfigurationKeysPublic.java | 7 +-
.../hadoop/fs/CompositeCrcFileChecksum.java | 8 +-
.../org/apache/hadoop/fs/ContentSummary.java | 26 +-
.../java/org/apache/hadoop/fs/CreateFlag.java | 2 +
.../main/java/org/apache/hadoop/fs/DF.java | 10 +-
.../hadoop/fs/DelegationTokenRenewer.java | 26 +-
.../java/org/apache/hadoop/fs/FSBuilder.java | 45 ++-
.../hadoop/fs/FSDataOutputStreamBuilder.java | 32 ++
.../org/apache/hadoop/fs/FSInputChecker.java | 10 +-
.../org/apache/hadoop/fs/FSLinkResolver.java | 2 +-
.../org/apache/hadoop/fs/FSOutputSummer.java | 8 +
.../org/apache/hadoop/fs/FileChecksum.java | 25 +-
.../org/apache/hadoop/fs/FileContext.java | 112 +++---
.../apache/hadoop/fs/FileEncryptionInfo.java | 3 +
.../java/org/apache/hadoop/fs/FileStatus.java | 14 +
.../java/org/apache/hadoop/fs/FileSystem.java | 170 +++++++--
.../hadoop/fs/FileSystemLinkResolver.java | 8 +-
.../java/org/apache/hadoop/fs/FileUtil.java | 87 ++++-
.../apache/hadoop/fs/FilterFileSystem.java | 4 +-
.../java/org/apache/hadoop/fs/FsShell.java | 2 +-
.../java/org/apache/hadoop/fs/FsStatus.java | 23 +-
.../org/apache/hadoop/fs/GlobExpander.java | 4 +-
.../hadoop/fs/GlobalStorageStatistics.java | 2 +
.../org/apache/hadoop/fs/HarFileSystem.java | 8 +-
.../java/org/apache/hadoop/fs/HardLink.java | 6 +
.../apache/hadoop/fs/HasFileDescriptor.java | 2 +-
.../apache/hadoop/fs/LocalDirAllocator.java | 31 +-
.../org/apache/hadoop/fs/LocalFileSystem.java | 6 +-
.../fs/MD5MD5CRC32CastagnoliFileChecksum.java | 8 +-
.../hadoop/fs/MD5MD5CRC32FileChecksum.java | 13 +-
.../fs/MD5MD5CRC32GzipFileChecksum.java | 8 +-
.../apache/hadoop/fs/MultipartUploader.java | 3 +-
.../hadoop/fs/MultipartUploaderBuilder.java | 19 +-
.../java/org/apache/hadoop/fs/Options.java | 7 +-
.../java/org/apache/hadoop/fs/QuotaUsage.java | 64 +++-
.../apache/hadoop/fs/RawLocalFileSystem.java | 7 +-
.../java/org/apache/hadoop/fs/Seekable.java | 14 +-
.../main/java/org/apache/hadoop/fs/Stat.java | 4 +-
.../apache/hadoop/fs/StorageStatistics.java | 5 +
.../main/java/org/apache/hadoop/fs/Trash.java | 43 ++-
.../org/apache/hadoop/fs/TrashPolicy.java | 16 +-
.../java/org/apache/hadoop/fs/XAttrCodec.java | 6 +-
.../hadoop/fs/impl/AbstractFSBuilderImpl.java | 2 +
.../fs/impl/AbstractMultipartUploader.java | 2 +-
.../FutureDataInputStreamBuilderImpl.java | 5 +
.../hadoop/fs/impl/FutureIOSupport.java | 2 +
.../fs/impl/MultipartUploaderBuilderImpl.java | 3 +
.../hadoop/fs/permission/AclStatus.java | 4 +-
.../apache/hadoop/fs/permission/FsAction.java | 20 +-
.../hadoop/fs/permission/FsCreateModes.java | 9 +-
.../hadoop/fs/permission/FsPermission.java | 54 ++-
.../fs/permission/PermissionStatus.java | 39 ++-
.../org/apache/hadoop/fs/shell/Command.java | 24 +-
.../fs/shell/CommandWithDestination.java | 3 +
.../org/apache/hadoop/fs/shell/PathData.java | 3 +-
.../hadoop/fs/shell/find/BaseExpression.java | 19 +-
.../hadoop/fs/shell/find/Expression.java | 15 +-
.../hadoop/fs/shell/find/FindOptions.java | 1 +
.../apache/hadoop/fs/shell/find/Result.java | 21 +-
.../fs/statistics/IOStatisticsSnapshot.java | 8 +-
.../fs/statistics/IOStatisticsSupport.java | 1 +
.../hadoop/fs/statistics/MeanStatistic.java | 1 +
.../statistics/impl/IOStatisticsBinding.java | 4 +
.../apache/hadoop/fs/store/DataBlocks.java | 4 +
.../fs/store/audit/AuditingFunctions.java | 2 +
.../apache/hadoop/fs/viewfs/ConfigUtil.java | 56 +--
.../org/apache/hadoop/fs/viewfs/FsGetter.java | 9 +
.../apache/hadoop/fs/viewfs/InodeTree.java | 53 +--
.../fs/viewfs/MountTableConfigLoader.java | 1 +
.../hadoop/fs/viewfs/ViewFileSystem.java | 18 +-
.../viewfs/ViewFileSystemOverloadScheme.java | 10 +-
.../hadoop/fs/viewfs/ViewFileSystemUtil.java | 5 +-
.../org/apache/hadoop/fs/viewfs/ViewFs.java | 2 +-
.../hadoop/ha/ActiveStandbyElector.java | 31 +-
.../java/org/apache/hadoop/ha/HAAdmin.java | 3 +
.../apache/hadoop/ha/HAServiceProtocol.java | 8 +-
.../org/apache/hadoop/ha/HAServiceTarget.java | 8 +-
.../org/apache/hadoop/ha/HealthMonitor.java | 3 +
.../hadoop/ha/ZKFailoverController.java | 2 +
.../org/apache/hadoop/http/HtmlQuoting.java | 1 +
.../org/apache/hadoop/http/HttpServer2.java | 36 +-
.../apache/hadoop/io/AbstractMapWritable.java | 22 +-
.../java/org/apache/hadoop/io/ArrayFile.java | 68 +++-
.../hadoop/io/ArrayPrimitiveWritable.java | 4 +-
.../apache/hadoop/io/BinaryComparable.java | 9 +
.../org/apache/hadoop/io/BloomMapFile.java | 2 +-
.../org/apache/hadoop/io/BooleanWritable.java | 9 +-
.../io/BoundedByteArrayOutputStream.java | 14 +-
.../org/apache/hadoop/io/ByteWritable.java | 10 +-
.../org/apache/hadoop/io/BytesWritable.java | 4 +
.../apache/hadoop/io/CompressedWritable.java | 13 +-
.../org/apache/hadoop/io/DataInputBuffer.java | 23 +-
.../apache/hadoop/io/DataOutputBuffer.java | 32 +-
.../org/apache/hadoop/io/EnumSetWritable.java | 15 +-
.../org/apache/hadoop/io/FloatWritable.java | 10 +-
.../org/apache/hadoop/io/GenericWritable.java | 4 +-
.../java/org/apache/hadoop/io/IOUtils.java | 14 +-
.../org/apache/hadoop/io/InputBuffer.java | 23 +-
.../org/apache/hadoop/io/IntWritable.java | 10 +-
.../org/apache/hadoop/io/LongWritable.java | 10 +-
.../java/org/apache/hadoop/io/MD5Hash.java | 81 ++++-
.../java/org/apache/hadoop/io/MapFile.java | 230 ++++++++++--
.../apache/hadoop/io/MultipleIOException.java | 11 +-
.../org/apache/hadoop/io/NullWritable.java | 5 +-
.../org/apache/hadoop/io/ObjectWritable.java | 61 +++-
.../org/apache/hadoop/io/OutputBuffer.java | 20 +-
.../org/apache/hadoop/io/RawComparator.java | 2 +-
.../org/apache/hadoop/io/ReadaheadPool.java | 2 +-
.../org/apache/hadoop/io/SecureIOUtils.java | 26 +-
.../org/apache/hadoop/io/SequenceFile.java | 328 ++++++++++++------
.../java/org/apache/hadoop/io/SetFile.java | 75 +++-
.../org/apache/hadoop/io/ShortWritable.java | 7 +-
.../main/java/org/apache/hadoop/io/Text.java | 77 +++-
.../main/java/org/apache/hadoop/io/UTF8.java | 48 ++-
.../org/apache/hadoop/io/VIntWritable.java | 7 +-
.../org/apache/hadoop/io/VLongWritable.java | 7 +-
.../apache/hadoop/io/VersionedWritable.java | 2 +-
.../java/org/apache/hadoop/io/Writable.java | 4 +-
.../apache/hadoop/io/WritableComparator.java | 120 +++++--
.../apache/hadoop/io/WritableFactories.java | 26 +-
.../org/apache/hadoop/io/WritableFactory.java | 2 +-
.../org/apache/hadoop/io/WritableName.java | 32 +-
.../org/apache/hadoop/io/WritableUtils.java | 34 +-
.../apache/hadoop/io/compress/BZip2Codec.java | 8 +-
.../io/compress/BlockDecompressorStream.java | 4 +-
.../apache/hadoop/io/compress/CodecPool.java | 10 +-
.../hadoop/io/compress/CompressionCodec.java | 8 +-
.../io/compress/CompressionCodecFactory.java | 5 +-
.../io/compress/CompressionInputStream.java | 8 +-
.../io/compress/CompressionOutputStream.java | 4 +-
.../apache/hadoop/io/compress/Compressor.java | 3 +
.../hadoop/io/compress/Decompressor.java | 2 +-
.../io/compress/DecompressorStream.java | 2 +-
.../apache/hadoop/io/compress/Lz4Codec.java | 8 +-
.../hadoop/io/compress/SnappyCodec.java | 8 +-
.../compress/SplittableCompressionCodec.java | 2 +
.../hadoop/io/compress/ZStandardCodec.java | 8 +-
.../io/compress/bzip2/Bzip2Compressor.java | 1 +
.../io/compress/bzip2/Bzip2Decompressor.java | 2 +
.../io/compress/bzip2/CBZip2InputStream.java | 8 +-
.../io/compress/bzip2/CBZip2OutputStream.java | 10 +-
.../io/compress/lz4/Lz4Decompressor.java | 2 +-
.../compress/snappy/SnappyDecompressor.java | 2 +-
.../io/compress/zlib/ZlibCompressor.java | 1 +
.../io/compress/zlib/ZlibDecompressor.java | 2 +
.../hadoop/io/compress/zlib/ZlibFactory.java | 2 +-
.../io/compress/zstd/ZStandardCompressor.java | 2 +
.../compress/zstd/ZStandardDecompressor.java | 1 +
.../hadoop/io/erasurecode/CodecUtil.java | 2 +
.../io/erasurecode/ErasureCodeNative.java | 2 +
.../io/erasurecode/coder/ErasureCoder.java | 1 +
.../erasurecode/coder/ErasureCodingStep.java | 5 +-
.../io/erasurecode/coder/ErasureDecoder.java | 10 +-
.../coder/ErasureDecodingStep.java | 6 +-
.../io/erasurecode/coder/ErasureEncoder.java | 2 +-
.../coder/ErasureEncodingStep.java | 6 +-
.../coder/HHErasureCodingStep.java | 4 +-
.../coder/HHXORErasureDecodingStep.java | 4 +-
.../coder/HHXORErasureEncodingStep.java | 4 +-
.../erasurecode/coder/XORErasureDecoder.java | 2 +-
.../io/erasurecode/coder/util/HHUtil.java | 2 +
.../io/erasurecode/grouper/BlockGrouper.java | 6 +-
.../rawcoder/DecodingValidator.java | 4 +-
.../rawcoder/RawErasureDecoder.java | 2 +
.../rawcoder/RawErasureEncoder.java | 5 +-
.../erasurecode/rawcoder/util/DumpUtil.java | 10 +-
.../io/erasurecode/rawcoder/util/GF256.java | 10 +-
.../rawcoder/util/GaloisField.java | 38 +-
.../io/erasurecode/rawcoder/util/RSUtil.java | 15 +
.../hadoop/io/file/tfile/ByteArray.java | 2 +-
.../apache/hadoop/io/file/tfile/TFile.java | 95 ++---
.../apache/hadoop/io/file/tfile/Utils.java | 20 +-
.../apache/hadoop/io/nativeio/NativeIO.java | 46 ++-
.../hadoop/io/retry/AsyncCallHandler.java | 11 +-
.../apache/hadoop/io/retry/RetryPolicies.java | 37 ++
.../apache/hadoop/io/retry/RetryProxy.java | 5 +
.../apache/hadoop/io/retry/RetryUtils.java | 4 +-
.../hadoop/io/serializer/Deserializer.java | 7 +-
.../io/serializer/DeserializerComparator.java | 2 +-
.../JavaSerializationComparator.java | 2 +-
.../hadoop/io/serializer/Serialization.java | 7 +-
.../io/serializer/SerializationFactory.java | 2 +
.../hadoop/io/serializer/Serializer.java | 7 +-
.../io/serializer/avro/AvroSerialization.java | 6 +
.../apache/hadoop/ipc/AlignmentContext.java | 2 +-
.../apache/hadoop/ipc/CallQueueManager.java | 6 +
.../java/org/apache/hadoop/ipc/Client.java | 28 +-
.../org/apache/hadoop/ipc/ClientCache.java | 2 +
.../java/org/apache/hadoop/ipc/ClientId.java | 12 +-
.../hadoop/ipc/GenericRefreshProtocol.java | 6 +-
.../org/apache/hadoop/ipc/ProtobufHelper.java | 4 +-
.../apache/hadoop/ipc/ProtobufRpcEngine.java | 8 +
.../apache/hadoop/ipc/ProtobufRpcEngine2.java | 6 +
.../hadoop/ipc/ProtocolMetaInterface.java | 2 +-
.../org/apache/hadoop/ipc/ProtocolProxy.java | 3 +-
.../main/java/org/apache/hadoop/ipc/RPC.java | 180 +++++++---
.../hadoop/ipc/RefreshCallQueueProtocol.java | 2 +-
.../apache/hadoop/ipc/RefreshRegistry.java | 1 +
.../apache/hadoop/ipc/RemoteException.java | 5 +-
.../org/apache/hadoop/ipc/RetryCache.java | 22 +-
.../org/apache/hadoop/ipc/RpcClientUtil.java | 6 +-
.../java/org/apache/hadoop/ipc/RpcEngine.java | 37 +-
.../org/apache/hadoop/ipc/RpcScheduler.java | 9 +-
.../apache/hadoop/ipc/RpcServerException.java | 4 +-
.../java/org/apache/hadoop/ipc/Server.java | 68 +++-
.../apache/hadoop/ipc/VersionedProtocol.java | 1 +
.../apache/hadoop/ipc/WritableRpcEngine.java | 50 ++-
.../DecayRpcSchedulerDetailedMetrics.java | 7 +-
.../org/apache/hadoop/jmx/JMXJsonServlet.java | 11 +-
.../java/org/apache/hadoop/log/LogLevel.java | 2 +
.../hadoop/log/LogThrottlingHelper.java | 8 +-
.../apache/hadoop/metrics2/MetricsSystem.java | 6 +-
.../hadoop/metrics2/MetricsSystemMXBean.java | 10 +-
.../metrics2/lib/MutableMetricsFactory.java | 4 +-
.../metrics2/lib/MutableRollingAverages.java | 4 +-
.../hadoop/metrics2/lib/MutableStat.java | 2 +-
.../apache/hadoop/metrics2/package-info.java | 10 +-
.../metrics2/sink/PrometheusMetricsSink.java | 4 +
.../sink/ganglia/AbstractGangliaSink.java | 5 +-
.../metrics2/sink/ganglia/GangliaSink30.java | 2 +-
.../metrics2/sink/ganglia/GangliaSink31.java | 2 +-
.../apache/hadoop/metrics2/util/MBeans.java | 8 +-
.../hadoop/metrics2/util/SampleQuantiles.java | 2 +-
.../main/java/org/apache/hadoop/net/DNS.java | 10 +-
.../apache/hadoop/net/DNSToSwitchMapping.java | 2 +
.../apache/hadoop/net/DomainNameResolver.java | 13 +-
.../java/org/apache/hadoop/net/InnerNode.java | 5 +-
.../org/apache/hadoop/net/InnerNodeImpl.java | 15 +-
.../java/org/apache/hadoop/net/NetUtils.java | 60 ++--
.../apache/hadoop/net/NetworkTopology.java | 24 +-
.../apache/hadoop/net/ScriptBasedMapping.java | 8 +-
.../net/ScriptBasedMappingWithDependency.java | 5 +-
.../apache/hadoop/net/SocketInputStream.java | 8 +-
.../apache/hadoop/net/SocketOutputStream.java | 13 +-
.../apache/hadoop/net/unix/DomainSocket.java | 13 +-
.../apache/hadoop/security/Credentials.java | 33 +-
.../security/GroupMappingServiceProvider.java | 8 +-
.../org/apache/hadoop/security/Groups.java | 4 +-
.../hadoop/security/HadoopKerberosName.java | 4 +-
.../org/apache/hadoop/security/KDiag.java | 5 +-
.../apache/hadoop/security/KerberosInfo.java | 5 +-
.../hadoop/security/NullGroupsMapping.java | 2 +-
.../apache/hadoop/security/ProviderUtils.java | 1 +
.../security/RefreshUserMappingsProtocol.java | 4 +-
.../hadoop/security/SaslInputStream.java | 2 +-
.../security/SaslPropertiesResolver.java | 2 +-
.../apache/hadoop/security/SaslRpcClient.java | 14 +-
.../apache/hadoop/security/SaslRpcServer.java | 25 +-
.../apache/hadoop/security/SecurityUtil.java | 14 +-
.../hadoop/security/ShellBasedIdMapping.java | 9 +-
.../ShellBasedUnixGroupsNetgroupMapping.java | 2 +
.../hadoop/security/UserGroupInformation.java | 46 ++-
.../security/alias/CredentialProvider.java | 13 +-
.../security/alias/CredentialShell.java | 6 +-
.../security/authorize/AccessControlList.java | 1 +
.../authorize/ImpersonationProvider.java | 4 +-
.../hadoop/security/authorize/ProxyUsers.java | 12 +-
.../RefreshAuthorizationPolicyProtocol.java | 2 +-
.../ssl/ReloadingX509KeystoreManager.java | 4 +-
.../security/token/DelegationTokenIssuer.java | 13 +
.../hadoop/security/token/DtFetcher.java | 23 +-
.../security/token/DtFileOperations.java | 18 +-
.../hadoop/security/token/DtUtilShell.java | 4 +-
.../apache/hadoop/security/token/Token.java | 18 +-
.../hadoop/security/token/TokenInfo.java | 6 +-
.../hadoop/security/token/TokenRenewer.java | 40 ++-
.../AbstractDelegationTokenSecretManager.java | 94 +++--
.../web/DelegationTokenAuthenticatedURL.java | 6 +
.../DelegationTokenAuthenticationFilter.java | 1 +
.../web/DelegationTokenAuthenticator.java | 8 +
.../hadoop/service/AbstractService.java | 2 +-
.../hadoop/service/CompositeService.java | 2 +-
.../hadoop/service/ServiceStateModel.java | 3 +
.../launcher/AbstractLaunchableService.java | 2 +
.../service/launcher/ServiceLauncher.java | 8 +-
.../hadoop/service/launcher/package-info.java | 27 +-
.../org/apache/hadoop/tools/CommandShell.java | 4 +-
.../apache/hadoop/tools/GetGroupsBase.java | 6 +-
.../hadoop/tools/GetUserMappingsProtocol.java | 2 +-
.../org/apache/hadoop/tools/TableListing.java | 11 +-
.../apache/hadoop/util/AsyncDiskService.java | 7 +-
.../BlockingThreadPoolExecutorService.java | 1 +
.../org/apache/hadoop/util/CrcComposer.java | 27 ++
.../java/org/apache/hadoop/util/CrcUtil.java | 36 ++
.../java/org/apache/hadoop/util/Daemon.java | 11 +-
.../org/apache/hadoop/util/DataChecksum.java | 55 ++-
.../apache/hadoop/util/DirectBufferPool.java | 3 +
.../org/apache/hadoop/util/DiskChecker.java | 16 +-
.../hadoop/util/DiskValidatorFactory.java | 2 +
.../org/apache/hadoop/util/GcTimeMonitor.java | 35 +-
.../hadoop/util/GenericOptionsParser.java | 38 +-
.../org/apache/hadoop/util/GenericsUtil.java | 4 +
.../java/org/apache/hadoop/util/IPList.java | 2 +-
.../org/apache/hadoop/util/IdGenerator.java | 5 +-
.../apache/hadoop/util/IdentityHashStore.java | 11 +
.../apache/hadoop/util/IndexedSortable.java | 7 +
.../org/apache/hadoop/util/IndexedSorter.java | 8 +
.../apache/hadoop/util/InstrumentedLock.java | 1 +
.../hadoop/util/IntrusiveCollection.java | 23 ++
.../apache/hadoop/util/JsonSerialization.java | 4 +-
.../apache/hadoop/util/JvmPauseMonitor.java | 3 +
.../apache/hadoop/util/LightWeightCache.java | 7 +-
.../apache/hadoop/util/LightWeightGSet.java | 19 +-
.../hadoop/util/LightWeightResizableGSet.java | 2 +
.../org/apache/hadoop/util/LineReader.java | 6 +-
.../java/org/apache/hadoop/util/Lists.java | 30 +-
.../org/apache/hadoop/util/MachineList.java | 6 +-
.../apache/hadoop/util/NativeCodeLoader.java | 8 +-
.../hadoop/util/NativeLibraryChecker.java | 3 +-
.../apache/hadoop/util/OperationDuration.java | 4 +-
.../java/org/apache/hadoop/util/Options.java | 2 +-
.../apache/hadoop/util/PrintJarMainClass.java | 2 +-
.../org/apache/hadoop/util/PriorityQueue.java | 36 +-
.../org/apache/hadoop/util/ProgramDriver.java | 20 +-
.../java/org/apache/hadoop/util/Progress.java | 49 ++-
.../org/apache/hadoop/util/ProtoUtil.java | 4 +
.../org/apache/hadoop/util/QuickSort.java | 3 +
.../apache/hadoop/util/ReflectionUtils.java | 15 +-
.../java/org/apache/hadoop/util/RunJar.java | 7 +-
.../apache/hadoop/util/SequentialNumber.java | 17 +-
.../org/apache/hadoop/util/ServletUtil.java | 16 +-
.../java/org/apache/hadoop/util/Sets.java | 64 +++-
.../java/org/apache/hadoop/util/Shell.java | 98 +++++-
.../hadoop/util/ShutdownThreadsHelper.java | 8 +-
.../org/apache/hadoop/util/StopWatch.java | 3 +
.../apache/hadoop/util/StringInterner.java | 3 +
.../org/apache/hadoop/util/StringUtils.java | 60 +++-
.../java/org/apache/hadoop/util/Time.java | 2 +
.../java/org/apache/hadoop/util/Tool.java | 6 +-
.../org/apache/hadoop/util/ToolRunner.java | 10 +-
.../java/org/apache/hadoop/util/XMLUtils.java | 6 +-
.../java/org/apache/hadoop/util/ZKUtil.java | 1 +
.../org/apache/hadoop/util/bloom/Key.java | 4 +-
.../hadoop/util/concurrent/AsyncGet.java | 8 +-
.../hadoop/util/curator/ZKCuratorManager.java | 13 +-
.../functional/CommonCallableSupplier.java | 5 +
.../util/functional/RemoteIterators.java | 20 +-
.../hadoop/util/functional/package-info.java | 6 +-
.../apache/hadoop/util/hash/JenkinsHash.java | 2 +-
.../org/apache/hadoop/util/TestShell.java | 2 +-
366 files changed, 4843 insertions(+), 1353 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 6e762f567c1..d8e2dd35422 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -1171,6 +1171,16 @@
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <sourceFileExcludes>
+            <sourceFileExclude>**/FSProtos.java</sourceFileExclude>
+          </sourceFileExcludes>
+          <excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
+        </configuration>
+      </plugin>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
index 5b2d1449f9c..881a2ce811b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
@@ -57,8 +57,8 @@ public class ConfigRedactor {
* Given a key / value pair, decides whether or not to redact and returns
* either the original value or text indicating it has been redacted.
*
- * @param key
- * @param value
+ * @param key the property key.
+ * @param value the property value, returned as-is if it does not need redaction.
* @return Original value, or text indicating it has been redacted
*/
public String redact(String key, String value) {
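A minimal usage sketch of the redact(key, value) contract documented above (illustrative only; the property names below are assumptions, not taken from this patch):

    import org.apache.hadoop.conf.ConfigRedactor;
    import org.apache.hadoop.conf.Configuration;

    public class RedactExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        ConfigRedactor redactor = new ConfigRedactor(conf);
        // A key matching hadoop.security.sensitive-config-keys is replaced with a redaction marker...
        System.out.println(redactor.redact("ssl.server.keystore.password", "hunter2"));
        // ...while any other value is returned unchanged.
        System.out.println(redactor.redact("fs.defaultFS", "hdfs://nn:8020"));
      }
    }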
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 1f809b7b547..5f720841d76 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -317,7 +317,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
private boolean loadDefaults = true;
/**
- * Configuration objects
+ * Configuration objects.
*/
private static final WeakHashMap<Configuration,Object> REGISTRY =
new WeakHashMap<Configuration,Object>();
@@ -1908,6 +1908,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name Property name
* @param vStr The string value with time unit suffix to be converted.
* @param unit Unit to convert the stored property, if it exists.
+ * @return time duration in given time unit.
*/
public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1922,6 +1923,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param vStr The string value with time unit suffix to be converted.
* @param defaultUnit Unit to convert the stored property, if it exists.
* @param returnUnit Unit for the returned value.
+ * @return time duration in given time unit.
*/
private long getTimeDurationHelper(String name, String vStr,
TimeUnit defaultUnit, TimeUnit returnUnit) {
@@ -2206,7 +2208,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * Is the given value in the set of ranges
+ * Is the given value in the set of ranges.
* @param value the value to check
* @return is the value in the ranges?
*/
@@ -2263,7 +2265,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * Parse the given attribute as a set of integer ranges
+ * Parse the given attribute as a set of integer ranges.
* @param name the attribute name
* @param defaultValue the default value if it is not set
* @return a new set of ranges from the configured value
@@ -2482,7 +2484,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Fallback to clear text passwords in configuration.
- * @param name
+ * @param name the property name.
* @return clear text password or null
*/
protected char[] getPasswordFromConfig(String name) {
@@ -2547,6 +2549,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Set the socket address for the name property as
* a host:port.
+ * @param name property name.
+ * @param addr inetSocketAddress addr.
*/
public void setSocketAddr(String name, InetSocketAddress addr) {
set(name, NetUtils.getHostPortString(addr));
@@ -2724,6 +2728,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name the conf key name.
* @param defaultValue default value.
* @param xface the interface implemented by the named class.
+ * @param <U> Interface class type.
* @return property value as a Class,
* or defaultValue.
*/
@@ -2753,6 +2758,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name the property name.
* @param xface the interface implemented by the classes named by
* name.
+ * @param <U> Interface class type.
* @return a List of objects implementing xface.
*/
@SuppressWarnings("unchecked")
@@ -2785,15 +2791,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
set(name, theClass.getName());
}
- /**
+ /**
* Get a local file under a directory named by dirsProp with
* the given path. If dirsProp contains multiple directories,
* then one is chosen based on path's hash code. If the selected
* directory does not exist, an attempt is made to create it.
- *
+ *
* @param dirsProp directory in which to locate the file.
* @param path file-path.
* @return local file under the directory with the given path.
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPath(String dirsProp, String path)
throws IOException {
@@ -2817,15 +2824,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
throw new IOException("No valid local directories in property: "+dirsProp);
}
- /**
+ /**
* Get a local file name under a directory named in dirsProp with
* the given path. If dirsProp contains multiple directories,
* then one is chosen based on path's hash code. If the selected
* directory does not exist, an attempt is made to create it.
- *
+ *
* @param dirsProp directory in which to locate the file.
* @param path file-path.
* @return local file under the directory with the given path.
+ * @throws IOException raised on errors performing I/O.
*/
public File getFile(String dirsProp, String path)
throws IOException {
@@ -3437,7 +3445,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
- * @param prop
+ * @param prop properties.
*/
public void addTags(Properties prop) {
// Get all system tags
@@ -3538,7 +3546,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Print a warning if a property with a given name already exists with a
- * different value
+ * different value.
*/
private void checkForOverride(Properties properties, String name, String attr, String value) {
String propertyValue = properties.getProperty(attr);
@@ -3548,11 +3556,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
}
- /**
+ /**
* Write out the non-default properties in this configuration to the given
* {@link OutputStream} using UTF-8 encoding.
- *
+ *
* @param out the output stream to write to.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeXml(OutputStream out) throws IOException {
writeXml(new OutputStreamWriter(out, "UTF-8"));
@@ -3582,7 +3591,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* the configuration, this method throws an {@link IllegalArgumentException}.
*
*
+ * @param propertyName xml property name.
* @param out the writer to write to.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeXml(@Nullable String propertyName, Writer out)
throws IOException, IllegalArgumentException {
@@ -3736,7 +3747,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param config the configuration
* @param propertyName property name
* @param out the Writer to write to
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws IllegalArgumentException when property name is not
* empty and the property is not found in configuration
**/
@@ -3783,7 +3794,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
*
* @param config the configuration
* @param out the Writer to write to
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void dumpConfiguration(Configuration config,
Writer out) throws IOException {
@@ -3812,7 +3823,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param jsonGen json writer
* @param config configuration
* @param name property name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
private static void appendJSONProperty(JsonGenerator jsonGen,
Configuration config, String name, ConfigRedactor redactor)
@@ -3894,7 +3905,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
return this.quietmode;
}
- /** For debugging. List non-default properties to the terminal and exit. */
+ /** For debugging. List non-default properties to the terminal and exit.
+ * @param args the arguments to be parsed.
+ * @throws Exception exception.
+ */
public static void main(String[] args) throws Exception {
new Configuration().writeXml(System.out);
}
@@ -3928,8 +3942,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * get keys matching the the regex
- * @param regex
+ * get keys matching the regex.
+ * @param regex the regex to match against.
* @return {@literal Map<String,String>} with matching keys
*/
public Map<String,String> getValByRegex(String regex) {
@@ -3974,6 +3988,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Returns whether or not a deprecated name has been warned. If the name is not
* deprecated then always return false
+ * @param name the property name.
+ * @return true if name is a warned deprecation.
*/
public static boolean hasWarnedDeprecation(String name) {
DeprecationContext deprecations = deprecationContext.get();
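Several of the Configuration methods whose javadoc is completed above read most clearly with a short sketch. The property names here are invented for illustration; the method signatures are the existing hadoop-common ones:

    import java.util.Map;
    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.conf.Configuration;

    public class ConfigurationExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("demo.timeout", "30s");
        // getTimeDuration parses the unit suffix and converts to the requested unit,
        // which is what the new @return tag on getTimeDurationHelper describes.
        long millis = conf.getTimeDuration("demo.timeout", 10000L, TimeUnit.MILLISECONDS);
        // getValByRegex returns every key/value pair whose key matches the regex.
        Map<String, String> matches = conf.getValByRegex("^demo\\..*");
        // writeXml dumps the non-default properties as UTF-8 XML and may throw IOException.
        conf.writeXml(System.out);
        System.out.println(millis + " " + matches);
      }
    }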
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
index f06af2b98df..77a7117d196 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
@@ -33,7 +33,9 @@ public class Configured implements Configurable {
this(null);
}
- /** Construct a Configured. */
+ /** Construct a Configured.
+ * @param conf the Configuration object.
+ */
public Configured(Configuration conf) {
setConf(conf);
}
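The Configured(Configuration) constructor documented above is most often seen in the Configured + Tool pattern; a hedged sketch (MyTool is hypothetical, fs.defaultFS is only an example key):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class MyTool extends Configured implements Tool {
      public MyTool(Configuration conf) {
        super(conf);  // the Configured(Configuration) constructor documented in this patch
      }

      @Override
      public int run(String[] args) throws Exception {
        // getConf() returns the Configuration handed to the constructor (or set later).
        System.out.println(getConf().get("fs.defaultFS"));
        return 0;
      }

      public static void main(String[] args) throws Exception {
        MyTool tool = new MyTool(new Configuration());
        System.exit(ToolRunner.run(tool.getConf(), tool, args));
      }
    }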
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
index c93dc31a881..915faf4c237 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
@@ -33,6 +33,9 @@ public interface Reconfigurable extends Configurable {
* (or null if it was not previously set). If newVal is null, set the property
* to its default value;
*
+ * @param property property name.
+ * @param newVal new value.
+ * @throws ReconfigurationException if there was an error applying newVal.
* If the property cannot be changed, throw a
* {@link ReconfigurationException}.
*/
@@ -45,11 +48,14 @@ public interface Reconfigurable extends Configurable {
* If isPropertyReconfigurable returns true for a property,
* then changeConf should not throw an exception when changing
* this property.
+ * @param property property name.
+ * @return true if property reconfigurable; false if not.
*/
boolean isPropertyReconfigurable(String property);
/**
* Return all the properties that can be changed at run time.
+ * @return reconfigurable properties.
*/
Collection<String> getReconfigurableProperties();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index 35dfeb99f0b..1c451ca6d30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -79,6 +79,7 @@ public abstract class ReconfigurableBase
/**
* Construct a ReconfigurableBase with the {@link Configuration}
* conf.
+ * @param conf configuration.
*/
public ReconfigurableBase(Configuration conf) {
super((conf == null) ? new Configuration() : conf);
@@ -91,6 +92,7 @@ public abstract class ReconfigurableBase
/**
* Create a new configuration.
+ * @return configuration.
*/
protected abstract Configuration getNewConf();
@@ -162,6 +164,7 @@ public abstract class ReconfigurableBase
/**
* Start a reconfiguration task to reload configuration in background.
+ * @throws IOException raised on errors performing I/O.
*/
public void startReconfigurationTask() throws IOException {
synchronized (reconfigLock) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
index 0935bf025fd..b22af76c9eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
@@ -59,6 +59,10 @@ public class ReconfigurationException extends Exception {
/**
* Create a new instance of {@link ReconfigurationException}.
+ * @param property property name.
+ * @param newVal new value.
+ * @param oldVal old value.
+ * @param cause original exception.
*/
public ReconfigurationException(String property,
String newVal, String oldVal,
@@ -71,6 +75,9 @@ public class ReconfigurationException extends Exception {
/**
* Create a new instance of {@link ReconfigurationException}.
+ * @param property property name.
+ * @param newVal new value.
+ * @param oldVal old value.
*/
public ReconfigurationException(String property,
String newVal, String oldVal) {
@@ -82,6 +89,7 @@ public class ReconfigurationException extends Exception {
/**
* Get property that cannot be changed.
+ * @return property info.
*/
public String getProperty() {
return property;
@@ -89,6 +97,7 @@ public class ReconfigurationException extends Exception {
/**
* Get value to which property was supposed to be changed.
+ * @return new value.
*/
public String getNewValue() {
return newVal;
@@ -96,6 +105,7 @@ public class ReconfigurationException extends Exception {
/**
* Get old value of property that cannot be changed.
+ * @return old value.
*/
public String getOldValue() {
return oldVal;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
index 05ec90758e5..ca9ddb61566 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
@@ -42,7 +42,8 @@ public class ReconfigurationTaskStatus {
/**
* Return true if
* - A reconfiguration task has finished or
- * - an active reconfiguration task is running
+ * - an active reconfiguration task is running.
+ * @return true if startTime > 0; false if not.
*/
public boolean hasTask() {
return startTime > 0;
@@ -51,6 +52,7 @@ public class ReconfigurationTaskStatus {
/**
* Return true if the latest reconfiguration task has finished and there is
* no another active task running.
+ * @return true if endTime > 0; false if not.
*/
public boolean stopped() {
return endTime > 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
index 64c754faa59..e6813b96a26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
@@ -145,14 +145,18 @@ public abstract class CryptoCodec implements Configurable, Closeable {
public abstract CipherSuite getCipherSuite();
/**
- * Create a {@link org.apache.hadoop.crypto.Encryptor}.
- * @return Encryptor the encryptor
+ * Create a {@link org.apache.hadoop.crypto.Encryptor}.
+ *
+ * @return Encryptor the encryptor.
+ * @throws GeneralSecurityException thrown if create encryptor error.
*/
public abstract Encryptor createEncryptor() throws GeneralSecurityException;
-
+
/**
* Create a {@link org.apache.hadoop.crypto.Decryptor}.
+ *
* @return Decryptor the decryptor
+ * @throws GeneralSecurityException thrown if create decryptor error.
*/
public abstract Decryptor createDecryptor() throws GeneralSecurityException;
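A hedged sketch of the createEncryptor()/createDecryptor() contract documented above (assumes the codec classes configured by default in core-default.xml; error handling elided):

    import java.security.GeneralSecurityException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.CryptoCodec;
    import org.apache.hadoop.crypto.Decryptor;
    import org.apache.hadoop.crypto.Encryptor;

    public class CryptoCodecExample {
      public static void main(String[] args) throws GeneralSecurityException {
        CryptoCodec codec = CryptoCodec.getInstance(new Configuration());
        // Both factory methods declare GeneralSecurityException, matching the new @throws tags.
        Encryptor encryptor = codec.createEncryptor();
        Decryptor decryptor = codec.createDecryptor();
        System.out.println(codec.getCipherSuite() + " " + encryptor + " " + decryptor);
      }
    }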
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
index 5ab5d341fb8..067abde9dfb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
@@ -157,7 +157,7 @@ public class CryptoInputStream extends FilterInputStream implements
* @param off the buffer offset.
* @param len the maximum number of decrypted data bytes to read.
* @return int the total number of decrypted data bytes read into the buffer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public int read(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
index 8e752211255..2a1335b6e74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
@@ -146,7 +146,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public synchronized void write(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index 318975fd6ce..dad4d20df2a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -39,7 +39,11 @@ public class CryptoStreamUtils {
private static final Logger LOG =
LoggerFactory.getLogger(CryptoStreamUtils.class);
- /** Forcibly free the direct buffer. */
+ /**
+ * Forcibly free the direct buffer.
+ *
+ * @param buffer buffer.
+ */
public static void freeDB(ByteBuffer buffer) {
if (CleanerUtil.UNMAP_SUPPORTED) {
try {
@@ -52,13 +56,22 @@ public class CryptoStreamUtils {
}
}
- /** Read crypto buffer size */
+ /**
+ * Read crypto buffer size.
+ *
+ * @param conf configuration.
+ * @return hadoop.security.crypto.buffer.size.
+ */
public static int getBufferSize(Configuration conf) {
return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
}
-
- /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
+
+ /**
+ * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
+ *
+ * @param codec crypto codec.
+ */
public static void checkCodec(CryptoCodec codec) {
if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -67,17 +80,27 @@ public class CryptoStreamUtils {
}
}
- /** Check and floor buffer size */
+ /**
+ * Check and floor buffer size.
+ *
+ * @param codec crypto codec.
+ * @param bufferSize the size of the buffer to be used.
+ * @return the checked and floored buffer size.
+ */
public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE,
"Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
return bufferSize - bufferSize % codec.getCipherSuite()
.getAlgorithmBlockSize();
}
-
+
/**
* If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
* current position, otherwise return 0;
+ *
+ * @param in the input stream.
+ * @return current position, otherwise return 0.
+ * @throws IOException raised on errors performing I/O.
*/
public static long getInputStreamOffset(InputStream in) throws IOException {
if (in instanceof Seekable) {
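The two buffer-size helpers documented above compose as follows; a small sketch (CryptoStreamUtils is a private utility class, so this is illustrative rather than public API):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.CryptoCodec;
    import org.apache.hadoop.crypto.CryptoStreamUtils;

    public class BufferSizeExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        CryptoCodec codec = CryptoCodec.getInstance(conf);
        // Reads hadoop.security.crypto.buffer.size (default 8192).
        int requested = CryptoStreamUtils.getBufferSize(conf);
        // Floors the value to a multiple of the cipher suite's algorithm block size.
        int usable = CryptoStreamUtils.checkBufferSize(codec, requested);
        System.out.println(requested + " -> " + usable);
      }
    }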
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 0c65b74b291..b166cfc8611 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -225,34 +225,33 @@ public final class OpensslCipher {
output.position(output.position() + len);
return len;
}
-
+
/**
* Finishes a multiple-part operation. The data is encrypted or decrypted,
* depending on how this cipher was initialized.
*
- *
* The result is stored in the output buffer. Upon return, the output buffer's
* position will have advanced by n, where n is the value returned by this
* method; the output buffer's limit will not have changed.
- *
- *
+ *
* If output.remaining() bytes are insufficient to hold the result,
* a ShortBufferException is thrown.
*
- *
* Upon finishing, this method resets this cipher object to the state it was
* in when previously initialized. That is, the object is available to encrypt
* or decrypt more data.
- *
- *
- * If any exception is thrown, this cipher object need to be reset before it
+ *
+ * If any exception is thrown, this cipher object needs to be reset before it
* can be used again.
- *
+ *
* @param output the output ByteBuffer
* @return int number of bytes stored in output
- * @throws ShortBufferException
- * @throws IllegalBlockSizeException
- * @throws BadPaddingException
+ * @throws ShortBufferException if there is insufficient space in the output buffer.
+ * @throws IllegalBlockSizeException This exception is thrown when the length
+ * of data provided to a block cipher is incorrect.
+ * @throws BadPaddingException This exception is thrown when a particular
+ * padding mechanism is expected for the input
+ * data but the data is not padded properly.
*/
public int doFinal(ByteBuffer output) throws ShortBufferException,
IllegalBlockSizeException, BadPaddingException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index dafdaf7e15b..4d1674bd7b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -242,7 +242,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Serialize the metadata to a set of bytes.
* @return the serialized bytes
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected byte[] serialize() throws IOException {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Deserialize a new metadata object from a set of bytes.
* @param bytes the serialized metadata
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected Metadata(byte[] bytes) throws IOException {
String cipher = null;
@@ -450,7 +450,7 @@ public abstract class KeyProvider implements Closeable {
* when decrypting data.
* @param versionName the name of a specific version of the key
* @return the key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion getKeyVersion(String versionName
) throws IOException;
@@ -458,14 +458,15 @@ public abstract class KeyProvider implements Closeable {
/**
* Get the key names for all keys.
* @return the list of key names
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<String> getKeys() throws IOException;
/**
* Get key metadata in bulk.
* @param names the names of the keys to get
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return Metadata Array.
*/
public Metadata[] getKeysMetadata(String... names) throws IOException {
Metadata[] result = new Metadata[names.length];
@@ -477,8 +478,10 @@ public abstract class KeyProvider implements Closeable {
/**
* Get the key material for all versions of a specific key name.
+ *
+ * @param name the base name of the key.
* @return the list of key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
@@ -488,7 +491,7 @@ public abstract class KeyProvider implements Closeable {
* @param name the base name of the key
* @return the version name of the current version of the key or null if the
* key version doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public KeyVersion getCurrentKey(String name) throws IOException {
Metadata meta = getMetadata(name);
@@ -502,7 +505,7 @@ public abstract class KeyProvider implements Closeable {
* Get metadata about the key.
* @param name the basename of the key
* @return the key's metadata or null if the key doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract Metadata getMetadata(String name) throws IOException;
@@ -512,7 +515,7 @@ public abstract class KeyProvider implements Closeable {
* @param material the key material for the first version of the key.
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion createKey(String name, byte[] material,
Options options) throws IOException;
@@ -537,7 +540,7 @@ public abstract class KeyProvider implements Closeable {
* @param size length of the key.
* @param algorithm algorithm to use for generating the key.
* @return the generated key.
- * @throws NoSuchAlgorithmException
+ * @throws NoSuchAlgorithmException no such algorithm exception.
*/
protected byte[] generateKey(int size, String algorithm)
throws NoSuchAlgorithmException {
@@ -558,8 +561,8 @@ public abstract class KeyProvider implements Closeable {
* @param name the base name of the key
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
- * @throws NoSuchAlgorithmException
+ * @throws IOException raised on errors performing I/O.
+ * @throws NoSuchAlgorithmException no such algorithm exception.
*/
public KeyVersion createKey(String name, Options options)
throws NoSuchAlgorithmException, IOException {
@@ -570,7 +573,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Delete the given key.
* @param name the name of the key to delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteKey(String name) throws IOException;
@@ -579,7 +582,7 @@ public abstract class KeyProvider implements Closeable {
* @param name the basename of the key
* @param material the new key material
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion rollNewVersion(String name,
byte[] material
@@ -601,7 +604,10 @@ public abstract class KeyProvider implements Closeable {
*
* @param name the basename of the key
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @throws NoSuchAlgorithmException This exception is thrown when a particular
+ * cryptographic algorithm is requested
+ * but is not available in the environment.
*/
public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
IOException {
@@ -620,7 +626,7 @@ public abstract class KeyProvider implements Closeable {
* version of the given key.
*
* @param name the basename of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void invalidateCache(String name) throws IOException {
// NOP
@@ -628,7 +634,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Ensures that any changes to the keys are written to persistent store.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void flush() throws IOException;
@@ -637,7 +643,7 @@ public abstract class KeyProvider implements Closeable {
* "/aaa/bbb".
* @param versionName the version name to split
* @return the base name of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String getBaseName(String versionName) throws IOException {
int div = versionName.lastIndexOf('@');
@@ -660,9 +666,11 @@ public abstract class KeyProvider implements Closeable {
/**
* Find the provider with the given key.
+ *
* @param providerList the list of providers
- * @param keyName the key name we are looking for
+ * @param keyName the key name we are looking for.
* @return the KeyProvider that has the key
+ * @throws IOException raised on errors performing I/O.
*/
public static KeyProvider findProvider(List<KeyProvider> providerList,
String keyName) throws IOException {
@@ -680,7 +688,7 @@ public abstract class KeyProvider implements Closeable {
* means. If true, the password should be provided by the caller using
* setPassword().
* @return Whether or not the provider requires a password
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean needsPassword() throws IOException {
return false;
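A sketch of the KeyProvider lifecycle whose javadoc the patch completes ("demo-key" and the assumption that at least one provider is configured via hadoop.security.key.provider.path are illustrative):

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderFactory;

    public class KeyProviderExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
        KeyProvider provider = providers.get(0);  // assumes a provider is configured
        // createKey and rollNewVersion declare both IOException and NoSuchAlgorithmException,
        // as the updated @throws tags spell out.
        KeyProvider.KeyVersion v1 = provider.createKey("demo-key", KeyProvider.options(conf));
        KeyProvider.KeyVersion current = provider.getCurrentKey("demo-key");
        KeyProvider.KeyVersion v2 = provider.rollNewVersion("demo-key");
        provider.flush();  // persist pending changes
        System.out.println(v1.getVersionName() + " " + current + " " + v2.getVersionName());
      }
    }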
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index 3f3c367fc39..d706e5ef100 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -178,6 +178,7 @@ public class KeyProviderCryptoExtension extends
* Calls to this method allows the underlying KeyProvider to warm-up any
* implementation specific caches used to store the Encrypted Keys.
* @param keyNames Array of Key Names
+ * @throws IOException thrown if the key material could not be encrypted.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException;
@@ -474,8 +475,9 @@ public class KeyProviderCryptoExtension extends
/**
* This constructor is to be used by sub classes that provide
* delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
- * @param keyProvider
- * @param extension
+ *
+ * @param keyProvider key provider.
+ * @param extension crypto extension.
*/
protected KeyProviderCryptoExtension(KeyProvider keyProvider,
CryptoExtension extension) {
@@ -486,6 +488,7 @@ public class KeyProviderCryptoExtension extends
* Notifies the Underlying CryptoExtension implementation to warm up any
* implementation specific caches for the specified KeyVersions
* @param keyNames Arrays of key Names
+ * @throws IOException raised on errors performing I/O.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException {
@@ -557,7 +560,7 @@ public class KeyProviderCryptoExtension extends
* Calls {@link CryptoExtension#drain(String)} for the given key name on the
* underlying {@link CryptoExtension}.
*
- * @param keyName
+ * @param keyName key name.
*/
public void drain(String keyName) {
getExtension().drain(keyName);
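The warm-up/drain and encrypted-key calls touched above fit together roughly like this (a sketch; "demo-key" must already exist in the wrapped provider):

    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;

    public class CryptoExtensionExample {
      public static void demo(KeyProvider provider) throws Exception {
        KeyProviderCryptoExtension ext =
            KeyProviderCryptoExtension.createKeyProviderCryptoExtension(provider);
        // May pre-fill implementation-specific EDEK caches; throws IOException on failure.
        ext.warmUpEncryptedKeys("demo-key");
        KeyProviderCryptoExtension.EncryptedKeyVersion edek =
            ext.generateEncryptedKey("demo-key");
        KeyProvider.KeyVersion decrypted = ext.decryptEncryptedKey(edek);
        // Discards cached values for the key, per the drain(String) javadoc above.
        ext.drain("demo-key");
        System.out.println(edek.getEncryptionKeyVersionName() + " " + decrypted.getName());
      }
    }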
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
index 1fdc2fe1245..3c1af424eb7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
@@ -48,14 +48,14 @@ public class KeyProviderDelegationTokenExtension extends
* Renews the given token.
* @param token The token to be renewed.
* @return The token's lifetime after renewal, or 0 if it can't be renewed.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
long renewDelegationToken(final Token<?> token) throws IOException;
/**
* Cancels the given token.
* @param token The token to be cancelled.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
Void cancelDelegationToken(final Token<?> token) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
index a75f7d3aa63..c18d0d41bc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
*
* @param args Command line arguments.
* @return 0 on success, 1 on failure.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ public class KeyShell extends CommandShell {
* success and 1 for failure.
*
* @param args Command line arguments.
- * @throws Exception
+ * @throws Exception raised on errors performing I/O.
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
index be2db05842c..65eded918d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
@@ -63,7 +63,7 @@ public class ValueQueue {
* @param keyName Key name
* @param keyQueue Queue that needs to be filled
* @param numValues number of Values to be added to the queue.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void fillQueueForKey(String keyName,
Queue<E> keyQueue, int numValues) throws IOException;
@@ -268,7 +268,7 @@ public class ValueQueue {
* Initializes the Value Queues for the provided keys by calling the
* fill Method with "numInitValues" values
* @param keyNames Array of key Names
- * @throws ExecutionException
+ * @throws ExecutionException execution exception.
*/
public void initializeQueuesForKeys(String... keyNames)
throws ExecutionException {
@@ -285,8 +285,8 @@ public class ValueQueue {
* function to add 1 value to Queue and then drain it.
* @param keyName String key name
* @return E the next value in the Queue
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException execution exception.
*/
public E getNext(String keyName)
throws IOException, ExecutionException {
@@ -344,8 +344,8 @@ public class ValueQueue {
* @param keyName String key name
* @param num Minimum number of values to return.
* @return {@literal List<E>} values returned
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException execution exception.
*/
public List<E> getAtMost(String keyName, int num) throws IOException,
ExecutionException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9818b472f0..a4737c548c8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -272,7 +272,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param supportedScheme the scheme supported by the implementor
* @param authorityNeeded if true then theURI must have authority, if false
* then the URI must have null authority.
- *
+ * @param defaultPort default port to use if port is not specified in the URI.
* @throws URISyntaxException uri has syntax error
*/
public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
statistics = getStatistics(uri);
}
-
+
/**
- * Check that the Uri's scheme matches
- * @param uri
- * @param supportedScheme
+ * Check that the Uri's scheme matches.
+ *
+ * @param uri name URI of the FS.
+ * @param supportedScheme supported scheme.
*/
public void checkScheme(URI uri, String supportedScheme) {
String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* If the path is fully qualified URI, then its scheme and authority
* matches that of this file system. Otherwise the path must be
* slash-relative name.
- *
+ * @param path the path.
* @throws InvalidPathException if the path is invalid
*/
public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Make the path fully qualified to this file system
- * @param path
+ * @param path the path.
* @return the qualified path
*/
public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* through any internal symlinks or mount point
* @param p path to be resolved
* @return fully qualified path
- * @throws FileNotFoundException
- * @throws AccessControlException
- * @throws IOException
+ * @throws FileNotFoundException if the path does not exist.
+ * @throws AccessControlException if access is denied.
+ * @throws IOException raised on errors performing I/O.
* @throws UnresolvedLinkException if symbolic link on path cannot be
* resolved internally
*/
@@ -513,6 +514,18 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
* that the Path f must be fully qualified and the permission is absolute
* (i.e. umask has been applied).
+ *
+ * @param f the path.
+ * @param createFlag create flags.
+ * @param opts create options.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported file system exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream.
*/
public final FSDataOutputStream create(final Path f,
final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
@@ -630,6 +643,24 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
* have been declared explicitly.
+ *
+ * @param f the path.
+ * @param flag create flag.
+ * @param absolutePermission absolute permission.
+ * @param bufferSize buffer size.
+ * @param replication replications.
+ * @param blockSize block size.
+ * @param progress progress.
+ * @param checksumOpt check sum opt.
+ * @param createParent create parent.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported filesystem exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream.
*/
public abstract FSDataOutputStream createInternal(Path f,
EnumSet<CreateFlag> flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
* f must be fully qualified and the permission is absolute (i.e.
* umask has been applied).
+ * @param dir directory.
+ * @param permission permission.
+ * @param createParent create parent flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void mkdir(final Path dir, final FsPermission permission,
final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#delete(Path, boolean)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @param recursive recursive flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the file or directory was deleted, false otherwise.
*/
public abstract boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#open(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return input stream.
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
@@ -673,6 +727,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#open(Path, int)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @param bufferSize buffer size.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return input stream.
*/
public abstract FSDataInputStream open(final Path f, int bufferSize)
throws AccessControlException, FileNotFoundException,
@@ -682,6 +744,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#truncate(Path, long)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @param newLength new length.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the file has been truncated to the desired length, false otherwise.
*/
public boolean truncate(Path f, long newLength)
throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setReplication(Path, short)} except that Path f must be
* for this file system.
+ *
+ * @param f the path.
+ * @param replication replication.
+ * @return true if the replication was successfully set, false otherwise.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean setReplication(final Path f,
final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src src.
+ * @param dst dst.
+ * @param options options.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public final void rename(final Path src, final Path dst,
final Options.Rename... options) throws AccessControlException,
@@ -727,6 +815,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* File systems that do not have a built in overwrite need implement only this
* method and can take advantage of the default impl of the other
* {@link #renameInternal(Path, Path, boolean)}
+ *
+ * @param src src.
+ * @param dst dst.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void renameInternal(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
@@ -737,6 +834,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src src.
+ * @param dst dst.
+ * @param overwrite overwrite flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public void renameInternal(final Path src, final Path dst,
boolean overwrite) throws AccessControlException,
@@ -800,6 +907,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#createSymlink(Path, Path, boolean)};
+ *
+ * @param target target.
+ * @param link link.
+ * @param createParent create parent.
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnresolvedLinkException unresolved link exception.
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws IOException, UnresolvedLinkException {
@@ -810,6 +923,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* Partially resolves the path. This is used during symlink resolution in
* {@link FSLinkResolver}, and differs from the similarly named method
* {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path.
+ * @return target path.
* @throws IOException subclass implementations may throw IOException
*/
public Path getLinkTarget(final Path f) throws IOException {
@@ -822,6 +937,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setPermission(Path, FsPermission)} except that Path f
* must be for this file system.
+ *
+ * @param f the path.
+ * @param permission permission.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setPermission(final Path f,
final FsPermission permission) throws AccessControlException,
@@ -831,6 +953,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setOwner(Path, String, String)} except that Path f must
* be for this file system.
+ *
+ * @param f the path.
+ * @param username username.
+ * @param groupname groupname.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setOwner(final Path f, final String username,
final String groupname) throws AccessControlException,
@@ -840,6 +970,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setTimes(Path, long, long)} except that Path f must be
* for this file system.
+ *
+ * @param f the path.
+ * @param mtime modify time.
+ * @param atime access time.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setTimes(final Path f, final long mtime,
final long atime) throws AccessControlException, FileNotFoundException,
@@ -849,6 +987,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFileChecksum(Path)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return the file checksum.
*/
public abstract FileChecksum getFileChecksum(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -859,6 +1004,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#getFileStatus(Path)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return File Status
*/
public abstract FileStatus getFileStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -870,8 +1022,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* In some FileSystem implementations such as HDFS metadata
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnsupportedOperationException if the operation is not supported.
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -883,6 +1035,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#access(Path, FsAction)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param path the path.
+ * @param mode fsaction mode.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
public void access(Path path, FsAction mode) throws AccessControlException,
@@ -897,6 +1056,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* encountered in the path leading up to the final path component.
* If the file system does not support symlinks then the behavior is
* equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnsupportedFileSystemException unsupported file system exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return file status.
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -908,6 +1074,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFileBlockLocations(Path, long, long)} except that
* Path f must be for this file system.
+ *
+ * @param f the path.
+ * @param start start.
+ * @param len length.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return BlockLocation Array.
*/
public abstract BlockLocation[] getFileBlockLocations(final Path f,
final long start, final long len) throws AccessControlException,
@@ -917,6 +1092,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return Fs Status.
*/
public FsStatus getFsStatus(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
@@ -927,6 +1109,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)}.
+ *
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return Fs Status.
*/
public abstract FsStatus getFsStatus() throws AccessControlException,
FileNotFoundException, IOException;
@@ -935,6 +1122,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#listStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus Iterator.
*/
public RemoteIterator<FileStatus> listStatusIterator(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -967,6 +1161,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* will have different formats for replicated and erasure coded file. Please
* refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
* for more details.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus Iterator.
*/
public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -999,6 +1200,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext.Util#listStatus(Path)} except that Path f must be
* for this file system.
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus array.
*/
public abstract FileStatus[] listStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -1007,7 +1214,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @param path the path.
+ * @throws IOException raised on errors performing I/O.
*/
public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
@@ -1020,6 +1228,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
* must be for this file system.
+ *
+ * @param verifyChecksum verify check sum flag.
+ * @throws AccessControlException access control exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setVerifyChecksum(final boolean verifyChecksum)
throws AccessControlException, IOException;
@@ -1041,7 +1253,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
* If delegation tokens not supported then return a list of size zero.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
@@ -1141,7 +1353,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -1160,7 +1372,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -1178,7 +1390,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public byte[] getXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1196,7 +1408,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1214,7 +1426,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param names XAttr names.
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map<String, byte[]> getXAttrs(Path path, List<String> names)
throws IOException {
@@ -1232,7 +1444,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to get extended attributes
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public List<String> listXAttrs(Path path)
throws IOException {
@@ -1249,7 +1461,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void removeXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1259,6 +1471,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#createSnapshot(Path, String)}.
+ *
+ * @param path the path.
+ * @param snapshotName snapshot name.
+ * @throws IOException raised on errors performing I/O.
+ * @return path.
*/
public Path createSnapshot(final Path path, final String snapshotName)
throws IOException {
@@ -1269,6 +1486,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#renameSnapshot(Path, String, String)}.
+ *
+ * @param path the path.
+ * @param snapshotOldName snapshot old name.
+ * @param snapshotNewName snapshot new name.
+ * @throws IOException raised on errors performing I/O.
*/
public void renameSnapshot(final Path path, final String snapshotOldName,
final String snapshotNewName) throws IOException {
@@ -1279,6 +1501,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#deleteSnapshot(Path, String)}.
+ *
+ * @param snapshotDir snapshot dir.
+ * @param snapshotName snapshot name.
+ * @throws IOException raised on errors performing I/O.
*/
public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
throws IOException {
@@ -1289,7 +1515,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void satisfyStoragePolicy(final Path path) throws IOException {
throw new UnsupportedOperationException(
@@ -1303,6 +1529,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException raised on errors performing I/O.
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -1314,7 +1541,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void unsetStoragePolicy(final Path src) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1326,7 +1553,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @param src file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
@@ -1338,7 +1565,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
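
Most of the AbstractFileSystem javadoc above points back to the corresponding FileContext operations. A short, self-contained usage sketch of those FileContext calls is shown below; the paths and data are examples only.

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class FileContextUsage {
      public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getFileContext();
        Path dir = new Path("/tmp/afs-demo");              // example path
        fc.mkdir(dir, FsPermission.getDirDefault(), true); // mkdir(Path, FsPermission, createParent)
        try (FSDataOutputStream out = fc.create(new Path(dir, "data.txt"),
            EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
            Options.CreateOpts.createParent())) {
          out.writeUTF("hello");
        }
        System.out.println(fc.getFileStatus(new Path(dir, "data.txt")).getLen());
        fc.delete(dir, true);                              // recursive delete
      }
    }
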
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
index 213fbc24c4d..7518dd2f7ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
@@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput {
private final FSDataInputStream stream;
private final long len;
- /** Construct given an {@link FSDataInputStream} and its length. */
+ /**
+ * Construct given an {@link FSDataInputStream} and its length.
+ *
+ * @param in inputstream.
+ * @param len len.
+ */
public AvroFSInput(final FSDataInputStream in, final long len) {
this.stream = in;
this.len = len;
}
- /** Construct given a {@link FileContext} and a {@link Path}. */
+ /** Construct given a {@link FileContext} and a {@link Path}.
+ * @param fc filecontext.
+ * @param p the path.
+ * @throws IOException If an I/O error occurred.
+ * */
public AvroFSInput(final FileContext fc, final Path p) throws IOException {
FileStatus status = fc.getFileStatus(p);
this.len = status.getLen();
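
The two constructors documented above differ only in how the stream and length are obtained. A minimal sketch of the FileContext-based one follows; the path is an example.

    import java.io.IOException;

    import org.apache.hadoop.fs.AvroFSInput;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class AvroFSInputUsage {
      public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getFileContext();
        try (AvroFSInput in = new AvroFSInput(fc, new Path("/tmp/data.avro"))) {
          System.out.println("length=" + in.length());   // length taken from the FileStatus
        }
      }
    }
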
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
index 607fffbcc70..e693bcbfe89 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
@@ -68,6 +68,7 @@ public abstract class BatchedRemoteIterator implements RemoteIterator {
*
* @param prevKey The key to send.
* @return A list of replies.
+ * @throws IOException If an I/O error occurred.
*/
public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
@@ -102,6 +103,8 @@ public abstract class BatchedRemoteIterator implements RemoteIterator {
/**
* Return the next list key associated with an element.
+ * @param element element.
+ * @return K Generics Type.
*/
public abstract K elementToPrevKey(E element);
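
To show how makeRequest and elementToPrevKey fit together, here is a rough subclass sketch. It assumes the usual BatchedListEntries(List, hasMore) helper on BatchedRemoteIterator; the integer batching is purely illustrative.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.fs.BatchedRemoteIterator;

    /** Illustrative iterator over the integers 1..max, fetched in batches of 10. */
    public class NumberBatchIterator extends BatchedRemoteIterator<Integer, Integer> {
      private final int max;

      public NumberBatchIterator(int max) {
        super(0);            // prevKey before the first element
        this.max = max;
      }

      @Override
      public BatchedEntries<Integer> makeRequest(Integer prevKey) throws IOException {
        List<Integer> batch = new ArrayList<>();
        for (int i = prevKey + 1; i <= max && batch.size() < 10; i++) {
          batch.add(i);
        }
        boolean hasMore = !batch.isEmpty() && batch.get(batch.size() - 1) < max;
        return new BatchedListEntries<>(batch, hasMore);
      }

      @Override
      public Integer elementToPrevKey(Integer element) {
        return element;      // the element itself is the key for the next request
      }
    }
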
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index 29358dd7d10..67687c1f0e0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -85,6 +85,7 @@ public class BlockLocation implements Serializable {
/**
* Copy constructor.
+ * @param that blocklocation.
*/
public BlockLocation(BlockLocation that) {
this.hosts = that.hosts;
@@ -100,6 +101,10 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, offset and length.
+ * @param names names array.
+ * @param hosts host array.
+ * @param offset offset.
+ * @param length length.
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length) {
@@ -108,6 +113,11 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, offset, length and corrupt flag.
+ * @param names names.
+ * @param hosts hosts.
+ * @param offset offset.
+ * @param length length.
+ * @param corrupt corrupt.
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length, boolean corrupt) {
@@ -116,6 +126,11 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, network topology, offset and length.
+ * @param names names.
+ * @param hosts hosts.
+ * @param topologyPaths topologyPaths.
+ * @param offset offset.
+ * @param length length.
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length) {
@@ -125,6 +140,12 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, network topology, offset, length
* and corrupt flag.
+ * @param names names.
+ * @param hosts hosts.
+ * @param topologyPaths topologyPaths.
+ * @param offset offset.
+ * @param length length.
+ * @param corrupt corrupt.
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length, boolean corrupt) {
@@ -177,6 +198,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of hosts (hostname) hosting this block.
+ * @return hosts array.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getHosts() throws IOException {
return hosts;
@@ -184,6 +207,7 @@ public class BlockLocation implements Serializable {
/**
* Get the list of hosts (hostname) hosting a cached replica of the block.
+ * @return cached hosts.
*/
public String[] getCachedHosts() {
return cachedHosts;
@@ -191,6 +215,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of names (IP:xferPort) hosting this block.
+ * @return names array.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getNames() throws IOException {
return names;
@@ -199,6 +225,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of network topology paths for each of the hosts.
* The last component of the path is the "name" (IP:xferPort).
+ * @return topology paths.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getTopologyPaths() throws IOException {
return topologyPaths;
@@ -206,6 +234,7 @@ public class BlockLocation implements Serializable {
/**
* Get the storageID of each replica of the block.
+ * @return storage ids.
*/
public String[] getStorageIds() {
return storageIds;
@@ -213,6 +242,7 @@ public class BlockLocation implements Serializable {
/**
* Get the storage type of each replica of the block.
+ * @return storage type of each replica of the block.
*/
public StorageType[] getStorageTypes() {
return storageTypes;
@@ -220,6 +250,7 @@ public class BlockLocation implements Serializable {
/**
* Get the start offset of file associated with this block.
+ * @return start offset of file associated with this block.
*/
public long getOffset() {
return offset;
@@ -227,6 +258,7 @@ public class BlockLocation implements Serializable {
/**
* Get the length of the block.
+ * @return length of the block.
*/
public long getLength() {
return length;
@@ -234,6 +266,7 @@ public class BlockLocation implements Serializable {
/**
* Get the corrupt flag.
+ * @return corrupt flag.
*/
public boolean isCorrupt() {
return corrupt;
@@ -241,6 +274,7 @@ public class BlockLocation implements Serializable {
/**
* Return true if the block is striped (erasure coded).
+ * @return true if the block is striped, false otherwise.
*/
public boolean isStriped() {
return false;
@@ -248,6 +282,7 @@ public class BlockLocation implements Serializable {
/**
* Set the start offset of file associated with this block.
+ * @param offset start offset.
*/
public void setOffset(long offset) {
this.offset = offset;
@@ -255,6 +290,7 @@ public class BlockLocation implements Serializable {
/**
* Set the length of block.
+ * @param length length of block.
*/
public void setLength(long length) {
this.length = length;
@@ -262,6 +298,7 @@ public class BlockLocation implements Serializable {
/**
* Set the corrupt flag.
+ * @param corrupt corrupt flag.
*/
public void setCorrupt(boolean corrupt) {
this.corrupt = corrupt;
@@ -269,6 +306,8 @@ public class BlockLocation implements Serializable {
/**
* Set the hosts hosting this block.
+ * @param hosts hosts array.
+ * @throws IOException If an I/O error occurred.
*/
public void setHosts(String[] hosts) throws IOException {
if (hosts == null) {
@@ -280,6 +319,7 @@ public class BlockLocation implements Serializable {
/**
* Set the hosts hosting a cached replica of this block.
+ * @param cachedHosts cached hosts.
*/
public void setCachedHosts(String[] cachedHosts) {
if (cachedHosts == null) {
@@ -291,6 +331,8 @@ public class BlockLocation implements Serializable {
/**
* Set the names (host:port) hosting this block.
+ * @param names names.
+ * @throws IOException If an I/O error occurred.
*/
public void setNames(String[] names) throws IOException {
if (names == null) {
@@ -302,6 +344,9 @@ public class BlockLocation implements Serializable {
/**
* Set the network topology paths of the hosts.
+ *
+ * @param topologyPaths topology paths.
+ * @throws IOException If an I/O error occurred.
*/
public void setTopologyPaths(String[] topologyPaths) throws IOException {
if (topologyPaths == null) {
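
The getters documented above are typically consumed after asking a FileSystem for the block locations of a file. A small sketch follows; the path is an example.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BlockLocationUsage {
      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/tmp/data.txt");                // example path
        FileStatus stat = fs.getFileStatus(p);
        for (BlockLocation loc : fs.getFileBlockLocations(stat, 0, stat.getLen())) {
          System.out.println(loc.getOffset() + "+" + loc.getLength()
              + " hosts=" + String.join(",", loc.getHosts()));
        }
      }
    }
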
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
index 6576fe5827d..f577649dd5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
@@ -47,6 +47,12 @@ public final class ByteBufferUtil {
/**
* Perform a fallback read.
+ *
+ * @param stream input stream.
+ * @param bufferPool bufferPool.
+ * @param maxLength maxLength.
+ * @throws IOException raised on errors performing I/O.
+ * @return byte buffer.
*/
public static ByteBuffer fallbackRead(
InputStream stream, ByteBufferPool bufferPool, int maxLength)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
index 362d125b09d..d7b61346d4e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
@@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* This is the constructor used by the builder.
* All overriding classes should implement this.
+ *
+ * @param builder builder.
+ * @throws IOException raised on errors performing I/O.
*/
public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
throws IOException {
@@ -140,6 +143,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* Increment the cached value of used space.
+ *
+ * @param value dfs used value.
*/
public void incDfsUsed(long value) {
used.addAndGet(value);
@@ -154,6 +159,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* How long in between runs of the background refresh.
+ *
+ * @return refresh interval.
*/
@VisibleForTesting
public long getRefreshInterval() {
@@ -163,6 +170,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* Randomize the refresh interval timing by this amount, the actual interval will be chosen
* uniformly between {@code interval-jitter} and {@code interval+jitter}.
+ *
+ * @return between interval-jitter and interval+jitter.
*/
@VisibleForTesting
public long getJitter() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 59ffe00bcb2..0efcdc8022f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -102,25 +102,44 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
return fs;
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path.
+ * @return name of the checksum file associated with a file.
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true if file is a checksum file name.
+ *
+ * @param file the file path.
+ * @return true if the file is a checksum file, false otherwise.
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path.
+ * @param fileSize file size.
+ * @return checksum length.
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum */
+ /**
+ * Return the bytes Per Checksum.
+ *
+ * @return bytes per check sum.
+ */
public int getBytesPerSum() {
return bytesPerChecksum;
}
@@ -362,6 +381,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Opens an FSDataInputStream at the indicated Path.
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
@@ -669,7 +689,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
@@ -754,7 +774,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* @param f
* given path
* @return the statuses of the files/directories in the given path
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
@@ -775,7 +795,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* @param f
* given path
* @return the statuses of the files/directories in the given patch
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
@@ -811,6 +831,10 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Copy it from FS control to the local dst name.
* If src and dst are directories, the copyCrc parameter
* determines whether to copy CRC files.
+ * @param src src path.
+ * @param dst dst path.
+ * @param copyCrc whether to copy CRC files.
+ * @throws IOException if an I/O error occurs.
*/
@SuppressWarnings("deprecation")
public void copyToLocalFile(Path src, Path dst, boolean copyCrc)
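
The checksum-file naming convention described by getChecksumFile and isChecksumFile can be seen directly on the local file system, which is a ChecksumFileSystem. A small sketch follows; the path is an example.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.ChecksumFileSystem;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class ChecksumNaming {
      public static void main(String[] args) throws Exception {
        LocalFileSystem local = FileSystem.getLocal(new Configuration());
        Path file = new Path("/tmp/part-0000");            // example path
        Path crc = local.getChecksumFile(file);            // /tmp/.part-0000.crc
        System.out.println(crc + " isChecksumFile="
            + ChecksumFileSystem.isChecksumFile(crc));
      }
    }
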
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index bc1122c56a2..4820c5c3045 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -70,30 +70,53 @@ public abstract class ChecksumFs extends FilterFs {
this.verifyChecksum = inVerifyChecksum;
}
- /** get the raw file system. */
+ /**
+ * get the raw file system.
+ *
+ * @return abstract file system.
+ */
public AbstractFileSystem getRawFs() {
return getMyFs();
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path.
+ * @return the checksum file associated with a file.
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true iff file is a checksum file name.
+ *
+ * @param file the file path.
+ * @return true if the file is a checksum file, false otherwise.
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path.
+ * @param fileSize file size.
+ * @return check sum file length.
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum. */
+ /**
+ * Return the bytes Per Checksum.
+ *
+ * @return bytes per sum.
+ */
public int getBytesPerSum() {
return defaultBytesPerChecksum;
}
@@ -433,7 +456,7 @@ public abstract class ChecksumFs extends FilterFs {
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index a799e883bcf..52252365092 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {
/**
* Number of filesystems instances can be created in parallel.
- *
+ *
* A higher number here does not necessarily improve performance, especially
* for object stores, where multiple threads may be attempting to create an FS
* instance for the same URI.
- *
+ *
* Default value: {@value}.
*/
public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
/**
* Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
- *
+ *
* Default value: {@value}.
+ *
*/
public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
64;
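
FS_CREATION_PARALLEL_COUNT is an ordinary Configuration key, so tuning it looks like any other configuration change. A minimal sketch, assuming the usual Configuration API:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public class ParallelCreationTuning {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Allow up to 32 FileSystem instances to be created in parallel
        // instead of the default of 64.
        conf.setInt(CommonConfigurationKeysPublic.FS_CREATION_PARALLEL_COUNT, 32);
      }
    }
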
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
index e1ed5cbcfca..bdbc8f3a33f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
@@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum {
private DataChecksum.Type crcType;
private int bytesPerCrc;
- /** Create a CompositeCrcFileChecksum. */
+ /**
+ * Create a CompositeCrcFileChecksum.
+ *
+ * @param crc crc.
+ * @param crcType crcType.
+ * @param bytesPerCrc bytesPerCrc.
+ */
public CompositeCrcFileChecksum(
int crc, DataChecksum.Type crcType, int bytesPerCrc) {
this.crc = crc;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
index 79850e1a2f2..9f97a12fa60 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
@@ -149,17 +149,31 @@ public class ContentSummary extends QuotaUsage implements Writable{
@Deprecated
public ContentSummary() {}
- /** Constructor, deprecated by ContentSummary.Builder
+ /**
+ * Constructor, deprecated by ContentSummary.Builder
* This constructor implicitly set spaceConsumed the same as length.
* spaceConsumed and length must be set explicitly with
- * ContentSummary.Builder
+ * ContentSummary.Builder.
+ *
+ * @param length length.
+ * @param fileCount file count.
+ * @param directoryCount directory count.
* */
@Deprecated
public ContentSummary(long length, long fileCount, long directoryCount) {
this(length, fileCount, directoryCount, -1L, length, -1L);
}
- /** Constructor, deprecated by ContentSummary.Builder */
+ /**
+ * Constructor, deprecated by ContentSummary.Builder.
+ *
+ * @param length length.
+ * @param fileCount file count.
+ * @param directoryCount directory count.
+ * @param quota quota.
+ * @param spaceConsumed space consumed.
+ * @param spaceQuota space quota.
+ * */
@Deprecated
public ContentSummary(
long length, long fileCount, long directoryCount, long quota,
@@ -172,7 +186,11 @@ public class ContentSummary extends QuotaUsage implements Writable{
setSpaceQuota(spaceQuota);
}
- /** Constructor for ContentSummary.Builder*/
+ /**
+ * Constructor for ContentSummary.Builder.
+ *
+ * @param builder builder.
+ */
private ContentSummary(Builder builder) {
super(builder);
this.length = builder.length;
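
Since the long-argument constructors above are deprecated in favour of ContentSummary.Builder, a hedged sketch of the builder style follows; the figures are made up and the builder method names are assumed to mirror the fields documented above.

    import org.apache.hadoop.fs.ContentSummary;

    public class ContentSummaryBuilderUsage {
      public static void main(String[] args) {
        ContentSummary summary = new ContentSummary.Builder()
            .length(1024)        // total bytes
            .fileCount(3)
            .directoryCount(1)
            .build();
        System.out.println(summary.toString());
      }
    }
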
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
index 71993713ad2..ca008e53693 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
@@ -189,6 +189,8 @@ public enum CreateFlag {
/**
* Validate the CreateFlag for the append operation. The flag must contain
* APPEND, and cannot contain OVERWRITE.
+ *
+ * @param flag enum set flag.
*/
public static void validateForAppend(EnumSet<CreateFlag> flag) {
validate(flag);
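
validateForAppend accepts a flag set that contains APPEND and rejects one that also contains OVERWRITE; a small sketch:

    import java.util.EnumSet;

    import org.apache.hadoop.fs.CreateFlag;

    public class CreateFlagValidation {
      public static void main(String[] args) {
        EnumSet<CreateFlag> append = EnumSet.of(CreateFlag.APPEND);
        CreateFlag.validateForAppend(append);     // passes: APPEND without OVERWRITE

        EnumSet<CreateFlag> bad = EnumSet.of(CreateFlag.APPEND, CreateFlag.OVERWRITE);
        try {
          CreateFlag.validateForAppend(bad);
        } catch (RuntimeException e) {            // rejected: OVERWRITE combined with APPEND
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }
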
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index da4636b2c0f..c5a052f3de4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -65,7 +65,10 @@ public class DF extends Shell {
return dirPath;
}
- /** @return a string indicating which filesystem volume we're checking. */
+ /**
+ * @return a string indicating which filesystem volume we're checking.
+ * @throws IOException raised on errors performing I/O.
+ */
public String getFilesystem() throws IOException {
if (Shell.WINDOWS) {
this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
@@ -100,7 +103,10 @@ public class DF extends Shell {
return (int) (used * 100.0 / cap);
}
- /** @return the filesystem mount point for the indicated volume */
+ /**
+ * @return the filesystem mount point for the indicated volume.
+ * @throws IOException raised on errors performing I/O.
+ */
public String getMount() throws IOException {
// Abort early if specified path does not exist
if (!dirFile.exists()) {
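
getFilesystem and getMount are instance methods of DF, which is constructed from a directory and a Configuration. A minimal sketch follows; the directory is an example.

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.DF;

    public class DFUsage {
      public static void main(String[] args) throws IOException {
        DF df = new DF(new File("/tmp"), new Configuration());
        System.out.println("volume=" + df.getFilesystem()
            + " mount=" + df.getMount());
      }
    }
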
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 33905dcbb77..6f6e3041065 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -47,7 +47,11 @@ public class DelegationTokenRenewer
/** @return the renew token. */
public Token<?> getRenewToken();
- /** Set delegation token. */
+ /**
+ * Set delegation token.
+ * @param <T> the type of the delegation token.
+ * @param token token.
+ */
public <T extends TokenIdentifier> void setDelegationToken(Token<T> token);
}
@@ -172,7 +176,11 @@ public class DelegationTokenRenewer
/** Queue to maintain the RenewActions to be processed by the {@link #run()} */
private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();
- /** For testing purposes */
+ /**
+ * For testing purposes.
+ *
+ * @return renew queue length.
+ */
@VisibleForTesting
protected int getRenewQueueLength() {
return queue.size();
@@ -211,7 +219,13 @@ public class DelegationTokenRenewer
}
}
- /** Add a renew action to the queue. */
+ /**
+ * Add a renew action to the queue.
+ *
+ * @param <T> the file system type.
+ * @param fs file system.
+ * @return renew action.
+ * */
@SuppressWarnings("static-access")
public <T extends FileSystem & Renewable> RenewAction<T> addRenewAction(final T fs) {
synchronized (this) {
@@ -230,8 +244,10 @@ public class DelegationTokenRenewer
/**
* Remove the associated renew action from the queue
- *
- * @throws IOException
+ *
+ * @param <T> the file system type.
+ * @param fs file system.
+ * @throws IOException raised on errors performing I/O.
*/
public <T extends FileSystem & Renewable> void removeRenewAction(
final T fs) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
index a4c7254cfeb..56ef51f128d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
@@ -37,12 +37,17 @@ public interface FSBuilder> {
/**
* Set optional Builder parameter.
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
*/
B opt(@Nonnull String key, @Nonnull String value);
/**
* Set optional boolean parameter for the Builder.
- *
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, boolean value);
@@ -50,6 +55,9 @@ public interface FSBuilder> {
/**
* Set optional int parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, int value);
@@ -57,6 +65,9 @@ public interface FSBuilder> {
/**
* Set optional float parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, float value);
@@ -64,6 +75,9 @@ public interface FSBuilder> {
/**
* Set optional long parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, long value);
@@ -71,6 +85,9 @@ public interface FSBuilder> {
/**
* Set optional double parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, double value);
@@ -78,6 +95,9 @@ public interface FSBuilder> {
/**
* Set an array of string values as optional parameter for the Builder.
*
+ * @param key key.
+ * @param values values.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, @Nonnull String... values);
@@ -87,12 +107,19 @@ public interface FSBuilder> {
*
* If the option is not supported or unavailable,
* the client should expect {@link #build()} throws IllegalArgumentException.
+ *
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
*/
B must(@Nonnull String key, @Nonnull String value);
/**
* Set mandatory boolean option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, boolean value);
@@ -100,6 +127,9 @@ public interface FSBuilder> {
/**
* Set mandatory int option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, int value);
@@ -107,6 +137,9 @@ public interface FSBuilder> {
/**
* Set mandatory float option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, float value);
@@ -114,6 +147,9 @@ public interface FSBuilder> {
/**
* Set mandatory long option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, long value);
@@ -121,6 +157,9 @@ public interface FSBuilder> {
/**
* Set mandatory double option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, double value);
@@ -128,6 +167,9 @@ public interface FSBuilder> {
/**
* Set a string array as mandatory option.
*
+ * @param key key.
+ * @param values values.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, @Nonnull String... values);
@@ -139,6 +181,7 @@ public interface FSBuilder> {
* @throws UnsupportedOperationException if the filesystem does not support
* the specific operation.
* @throws IOException on filesystem IO errors.
+ * @return generic type S.
*/
S build() throws IllegalArgumentException,
UnsupportedOperationException, IOException;
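
The opt/must distinction above is easiest to see through one of the concrete builders, for example the createFile builder: unknown opt() keys are ignored, while unknown must() keys are expected to make build() fail with IllegalArgumentException. A hedged sketch follows; the option key and path are hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BuilderOptUsage {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        try (FSDataOutputStream out = fs.createFile(new Path("/tmp/demo.txt"))
            .opt("fs.example.hint", "best-effort")   // hypothetical key; unknown opt() keys are ignored
            .overwrite(true)
            .build()) {
          out.writeBytes("demo\n");
        }
        // A must() key, by contrast, is mandatory: build() is expected to fail
        // with IllegalArgumentException when the filesystem does not support it.
      }
    }
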
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index c96d499d17b..16938a83a69 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -123,6 +123,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Constructor.
+ *
+ * @param fileSystem file system.
+ * @param p the path.
*/
protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
@Nonnull Path p) {
@@ -149,6 +152,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set permission for the file.
+ *
+ * @param perm permission.
+ * @return generic type B.
*/
public B permission(@Nonnull final FsPermission perm) {
checkNotNull(perm);
@@ -162,6 +168,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set the size of the buffer to be used.
+ *
+ * @param bufSize buffer size.
+ * @return generic type B.
*/
public B bufferSize(int bufSize) {
bufferSize = bufSize;
@@ -174,6 +183,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set replication factor.
+ *
+ * @param replica replication factor.
+ * @return generic type B.
*/
public B replication(short replica) {
replication = replica;
@@ -186,6 +198,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set block size.
+ *
+ * @param blkSize block size.
+ * @return generic type B.
*/
public B blockSize(long blkSize) {
blockSize = blkSize;
@@ -194,6 +209,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Return true to create the parent directories if they do not exist.
+ *
+ * @return true if missing parent directories are to be created, false otherwise.
*/
protected boolean isRecursive() {
return recursive;
@@ -201,6 +218,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Create the parent directory if they do not exist.
+ *
+ * @return generic type B.
*/
public B recursive() {
recursive = true;
@@ -213,6 +232,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set the facility of reporting progress.
+ *
+ * @param prog progress.
+ * @return generic type B.
*/
public B progress(@Nonnull final Progressable prog) {
checkNotNull(prog);
@@ -226,6 +248,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Create an FSDataOutputStream at the specified path.
+ *
+ * @return generic type B.
*/
public B create() {
flags.add(CreateFlag.CREATE);
@@ -236,6 +260,9 @@ public abstract class FSDataOutputStreamBuilder
* Set to true to overwrite the existing file.
* Set it to false, an exception will be thrown when calling {@link #build()}
* if the file exists.
+ *
+ * @param overwrite whether to overwrite an existing file.
+ * @return generic type B.
*/
public B overwrite(boolean overwrite) {
if (overwrite) {
@@ -248,6 +275,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Append to an existing file (optional operation).
+ *
+ * @return generic type B.
*/
public B append() {
flags.add(CreateFlag.APPEND);
@@ -260,6 +289,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set checksum opt.
+ *
+ * @param chksumOpt checksum option.
+ * @return generic type B.
*/
public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
checkNotNull(chksumOpt);
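
The setters documented above all return the builder itself, so a create call is usually written as one chain. A hedged sketch against the local filesystem; the path and values are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class CreateFileBuilderSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path path = new Path("/tmp/builder-sketch/data.bin");  // illustrative path

        try (FSDataOutputStream out = fs.createFile(path)
            .permission(new FsPermission((short) 0640))
            .bufferSize(8192)
            .replication((short) 1)
            .blockSize(64 * 1024 * 1024)
            .recursive()                  // create missing parent directories
            .overwrite(true)              // replace the file if it already exists
            .progress(() -> { })          // Progressable callback, a no-op here
            .build()) {
          out.write(new byte[] {1, 2, 3});
        }
      }
    }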
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index de66eab713a..ee16ca8a2cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -82,6 +82,7 @@ abstract public class FSInputChecker extends FSInputStream {
* @param sum the type of Checksum engine
* @param chunkSize maximun chunk size
* @param checksumSize the number byte of each checksum
+ * @param verifyChecksum whether to verify the checksum.
*/
protected FSInputChecker( Path file, int numOfRetries,
boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -118,6 +119,7 @@ abstract public class FSInputChecker extends FSInputStream {
* @param len maximum number of bytes to read
* @param checksum the data buffer into which to write checksums
* @return number of bytes read
+ * @throws IOException raised on errors performing I/O.
*/
abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
byte[] checksum) throws IOException;
@@ -129,7 +131,10 @@ abstract public class FSInputChecker extends FSInputStream {
*/
abstract protected long getChunkPosition(long pos);
- /** Return true if there is a need for checksum verification */
+ /**
+ * Return true if there is a need for checksum verification.
+ * @return true if checksum verification is needed, false otherwise.
+ */
protected synchronized boolean needChecksum() {
return verifyChecksum && sum != null;
}
@@ -357,6 +362,9 @@ abstract public class FSInputChecker extends FSInputStream {
* Convert a checksum byte array to a long
* This is deprecated since 0.22 since it is no longer in use
* by this class.
+ *
+ * @param checksum the checksum byte array.
+ * @return the checksum value as a long.
*/
@Deprecated
static public long checksum2long(byte[] checksum) {
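
For reference, a minimal standalone sketch of the packing that the deprecated checksum2long conversion performs, assuming the checksum bytes are treated as big-endian (most significant byte first); this is an illustrative rewrite, not the patched code:

    public class Checksum2LongSketch {
      // Pack a checksum byte array into a long, byte 0 becoming the most
      // significant byte (assumed big-endian order).
      static long pack(byte[] checksum) {
        long crc = 0L;
        for (int i = 0; i < checksum.length; i++) {
          crc |= (0xffL & checksum[i]) << ((checksum.length - i - 1) * 8);
        }
        return crc;
      }

      public static void main(String[] args) {
        byte[] crc32 = {(byte) 0xCA, (byte) 0xFE, (byte) 0xBA, (byte) 0xBE};
        System.out.printf("0x%08X%n", pack(crc32));   // prints 0xCAFEBABE
      }
    }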
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
index ffe4b34ca5f..f85cf7a8581 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
@@ -74,7 +74,7 @@ public abstract class FSLinkResolver {
* @param fc FileContext used to access file systems.
* @param path The path to resolve symlinks on.
* @return Generic type determined by the implementation of next.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T resolve(final FileContext fc, final Path path) throws IOException {
int count = 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 6de026b9d17..4ef512dc257 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -186,6 +186,8 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Return the number of valid bytes currently in the buffer.
+ *
+ * @return buffer data size.
*/
protected synchronized int getBufferedDataSize() {
return count;
@@ -227,6 +229,10 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Converts a checksum integer value to a byte stream
+ *
+ * @param sum the checksum.
+ * @param checksumSize checksum size in bytes.
+ * @return byte stream.
*/
static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -245,6 +251,8 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Resets existing buffer with a new one of the specified size.
+ *
+ * @param size the new buffer size.
*/
protected synchronized void setChecksumBufSize(int size) {
this.buf = new byte[size];
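
A hedged sketch of the conversion that convertToByteStream describes: the low 32 bits of Checksum#getValue() written out most significant byte first. The helper below is illustrative, not the Hadoop implementation:

    import java.nio.charset.StandardCharsets;
    import java.util.zip.CRC32;
    import java.util.zip.Checksum;

    public class ChecksumToBytesSketch {
      // Emit the low 32 bits of the checksum value as a big-endian byte array
      // (assumed layout; checksumSize is normally 4 for CRC32).
      static byte[] toByteStream(Checksum sum, int checksumSize) {
        int value = (int) sum.getValue();
        byte[] bytes = new byte[checksumSize];
        for (int i = 0; i < Math.min(checksumSize, 4); i++) {
          bytes[i] = (byte) (value >>> (24 - 8 * i));
        }
        return bytes;
      }

      public static void main(String[] args) {
        Checksum crc = new CRC32();
        byte[] data = "hadoop".getBytes(StandardCharsets.UTF_8);
        crc.update(data, 0, data.length);
        byte[] stream = toByteStream(crc, 4);
        System.out.printf("crc bytes: %02x %02x %02x %02x%n",
            stream[0], stream[1], stream[2], stream[3]);
      }
    }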
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 6822fa48562..62d2e3af786 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -28,20 +28,37 @@ import org.apache.hadoop.io.Writable;
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class FileChecksum implements Writable {
- /** The checksum algorithm name */
+ /**
+ * The checksum algorithm name.
+ *
+ * @return algorithm name.
+ */
public abstract String getAlgorithmName();
- /** The length of the checksum in bytes */
+ /**
+ * The length of the checksum in bytes.
+ *
+ * @return length.
+ */
public abstract int getLength();
- /** The value of the checksum in bytes */
+ /**
+ * The value of the checksum in bytes.
+ *
+ * @return byte array.
+ */
public abstract byte[] getBytes();
public ChecksumOpt getChecksumOpt() {
return null;
}
- /** Return true if both the algorithms and the values are the same. */
+ /**
+ * Return true if both the algorithms and the values are the same.
+ *
+ * @param other the object to compare against.
+ * @return true if equal, false otherwise.
+ */
@Override
public boolean equals(Object other) {
if (other == this) {
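
The equality rule documented above (same algorithm name, same bytes) is what callers typically rely on when comparing checksums across filesystems. A hedged helper restating it:

    import java.util.Arrays;
    import org.apache.hadoop.fs.FileChecksum;

    public final class ChecksumCompareSketch {
      // Two checksums match when both the algorithm name and the raw bytes agree;
      // a null on either side only matches another null.
      static boolean sameChecksum(FileChecksum a, FileChecksum b) {
        if (a == null || b == null) {
          return a == b;
        }
        return a.getAlgorithmName().equals(b.getAlgorithmName())
            && Arrays.equals(a.getBytes(), b.getBytes());
      }
    }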
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index f3004ce7e03..298570bb55f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -366,8 +366,8 @@ public class FileContext implements PathCapabilities {
* Create a FileContext with specified FS as default using the specified
* config.
*
- * @param defFS
- * @param aConf
+ * @param defFS default fs.
+ * @param aConf configuration.
* @return new FileContext with specified FS as default.
*/
public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified file system using the default config.
*
- * @param defaultFS
+ * @param defaultFS default fs.
* @return a FileContext with the specified AbstractFileSystem
* as the default FS.
*/
@@ -411,6 +411,7 @@ public class FileContext implements PathCapabilities {
*
* @throws UnsupportedFileSystemException If the file system from the default
* configuration is not supported
+ * @return file context.
*/
public static FileContext getFileContext()
throws UnsupportedFileSystemException {
@@ -430,7 +431,7 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified URI using the default config.
*
- * @param defaultFsUri
+ * @param defaultFsUri the default FS URI.
* @return a FileContext with the specified URI as the default FS.
*
* @throws UnsupportedFileSystemException If the file system for
@@ -444,8 +445,8 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified default URI using the specified config.
*
- * @param defaultFsUri
- * @param aConf
+ * @param defaultFsUri the default FS URI.
+ * @param aConf configuration.
* @return new FileContext for specified uri
* @throws UnsupportedFileSystemException If the file system with specified is
* not supported
@@ -476,7 +477,7 @@ public class FileContext implements PathCapabilities {
* {@link #getFileContext(URI, Configuration)} instead of this one.
*
*
- * @param aConf
+ * @param aConf configuration.
* @return new FileContext
* @throws UnsupportedFileSystemException If file system in the config
* is not supported
@@ -554,6 +555,7 @@ public class FileContext implements PathCapabilities {
/**
* Gets the working directory for wd-relative names (such a "foo/bar").
+ * @return the path.
*/
public Path getWorkingDirectory() {
return workingDir;
@@ -600,13 +602,14 @@ public class FileContext implements PathCapabilities {
* @throws FileNotFoundException If f does not exist
* @throws AccessControlException if access denied
* @throws IOException If an IO Error occurred
- *
+ * @throws UnresolvedLinkException If an unresolved link is encountered.
+ *
* Exceptions applicable to file systems accessed over RPC:
* @throws RpcClientException If an exception occurred in the RPC client
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
- *
+ *
* RuntimeExceptions:
* @throws InvalidPathException If path f is not valid
*/
@@ -620,7 +623,7 @@ public class FileContext implements PathCapabilities {
* A Fully-qualified path has scheme and authority specified and an absolute
* path.
* Use the default file system and working dir in this FileContext to qualify.
- * @param path
+ * @param path the path.
* @return qualified path
*/
public Path makeQualified(final Path path) {
@@ -759,6 +762,7 @@ public class FileContext implements PathCapabilities {
*
* Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
* same exceptions as create(Path, EnumSet, CreateOpts...).
+ * @throws IOException If an I/O error occurred.
*/
public FSDataOutputStreamBuilder create(final Path f)
throws IOException {
@@ -832,6 +836,8 @@ public class FileContext implements PathCapabilities {
*
* RuntimeExceptions:
* @throws InvalidPathException If path f is invalid
+ *
+ * @return true if delete is successful, false otherwise.
*/
public boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -862,6 +868,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream.
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnsupportedFileSystemException, IOException {
@@ -892,6 +899,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream.
*/
public FSDataInputStream open(final Path f, final int bufferSize)
throws AccessControlException, FileNotFoundException,
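
A hedged end-to-end sketch of the FileContext calls documented in this file: create, open (which returns an input stream), and delete. The local filesystem and path are illustrative:

    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class FileContextOpenDeleteSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        Path path = new Path("/tmp/filecontext-sketch.txt");   // illustrative path

        try (FSDataOutputStream out =
                 fc.create(path, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
          out.writeUTF("sample");
        }
        try (FSDataInputStream in = fc.open(path)) {
          System.out.println(in.readUTF());
        }
        System.out.println("deleted: " + fc.delete(path, false)); // true on success
      }
    }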
@@ -1001,6 +1009,7 @@ public class FileContext implements PathCapabilities {
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param options rename options.
*
* @throws AccessControlException If access is denied
* @throws FileAlreadyExistsException If dst already exists and
@@ -1052,7 +1061,7 @@ public class FileContext implements PathCapabilities {
/**
* Set permission of a path.
- * @param f
+ * @param f the path.
* @param permission - the new absolute permission (umask is not applied)
*
* @throws AccessControlException If access is denied
@@ -1196,7 +1205,7 @@ public class FileContext implements PathCapabilities {
* Set the verify checksum flag for the file system denoted by the path.
* This is only applicable if the
* corresponding FileSystem supports checksum. By default doesn't do anything.
- * @param verifyChecksum
+ * @param verifyChecksum whether to verify the checksum.
* @param f set the verifyChecksum for the Filesystem containing this path
*
* @throws AccessControlException If access is denied
@@ -1251,8 +1260,9 @@ public class FileContext implements PathCapabilities {
/**
* Synchronize client metadata state.
*
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException If an I/O error occurred.
+ * @throws UnsupportedOperationException If the file system does not
+ * support this operation.
*/
public void msync() throws IOException, UnsupportedOperationException {
defaultFS.msync();
@@ -1613,9 +1623,12 @@ public class FileContext implements PathCapabilities {
}
/**
+ * List corrupt file blocks.
+ *
+ * @param path the path.
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public RemoteIterator listCorruptFileBlocks(Path path)
throws IOException {
@@ -1739,6 +1752,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return true if f exists, false otherwise.
*/
public boolean exists(final Path f) throws AccessControlException,
UnsupportedFileSystemException, IOException {
@@ -1799,6 +1813,12 @@ public class FileContext implements PathCapabilities {
/**
* See {@link #listStatus(Path[], PathFilter)}
+ *
+ * @param files the list of paths.
+ * @throws AccessControlException If access is denied.
+ * @throws FileNotFoundException If any of the given paths does not exist.
+ * @throws IOException If an I/O error occurred.
+ * @return file status array.
*/
public FileStatus[] listStatus(Path[] files) throws AccessControlException,
FileNotFoundException, IOException {
@@ -2054,36 +2074,29 @@ public class FileContext implements PathCapabilities {
 [The remainder of this hunk adjusts the HTML definition list documenting the
  glob special characters for globStatus: "?" matches any single character;
  "*" matches zero or more characters; "[abc]" matches a single character from
  the set {a,b,c}; "[a-b]" matches a single character from the range a..b,
  where a must be lexicographically less than or equal to b; "[^a]" matches a
  single character not in the set or range, with "^" immediately after the
  opening bracket; "\c" removes (escapes) any special meaning of character c;
  "{ab,cd}" matches a string from the set {ab, cd}; "{ab,c{de,fh}}" matches a
  string from the set {ab, cde, cfh}. The HTML markup of the original hunk is
  not reproduced here.]
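
A hedged sketch of how the special characters summarized above are used through FileContext.Util#globStatus; the pattern and directory layout are illustrative:

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.Path;

    public class GlobStatusSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        // '*' matches any run of characters, '[0-9]' a single digit.
        Path pattern = new Path("/tmp/logs/app-*/part-[0-9]*");  // illustrative pattern
        FileStatus[] matches = fc.util().globStatus(pattern);
        if (matches != null) {                                   // may be null when nothing matches
          for (FileStatus st : matches) {
            System.out.println(st.getPath());
          }
        }
      }
    }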
@@ -2144,6 +2157,18 @@ public class FileContext implements PathCapabilities {
/**
* Copy file from src to dest. See
* {@link #copy(Path, Path, boolean, boolean)}
+ *
+ * @param src the source path.
+ * @param dst the destination path.
+ * @throws AccessControlException If access is denied.
+ * @throws FileAlreadyExistsException If dst already exists.
+ * @throws FileNotFoundException If src does not exist.
+ * @throws ParentNotDirectoryException If parent of dst is not a
+ * directory.
+ * @throws UnsupportedFileSystemException If file system for
+ * src/dst is not supported.
+ * @throws IOException If an I/O error occurred.
+ * @return true if the copy succeeded, false otherwise.
*/
public boolean copy(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
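
A hedged sketch of the two copy() overloads documented here, run against the local filesystem; the paths are illustrative and the source file is assumed to already exist:

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class FileContextCopySketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        Path src = new Path("/tmp/source.txt");           // assumed to exist already
        Path dst = new Path("/tmp/copy-of-source.txt");

        // Two-argument form: keep the source, fail if dst already exists.
        boolean copied = fc.util().copy(src, dst);
        System.out.println("copied: " + copied);

        // Four-argument form: overwrite the destination and delete the source.
        fc.util().copy(src, new Path("/tmp/moved.txt"),
            true /* deleteSource */, true /* overwrite */);
      }
    }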
@@ -2154,8 +2179,8 @@ public class FileContext implements PathCapabilities {
/**
* Copy from src to dst, optionally deleting src and overwriting dst.
- * @param src
- * @param dst
+ * @param src the source path.
+ * @param dst the destination path.
* @param deleteSource - delete src if true
* @param overwrite overwrite dst if true; throw IOException if dst exists
* and overwrite is false.
@@ -2276,7 +2301,7 @@ public class FileContext implements PathCapabilities {
* Are qualSrc and qualDst of the same file system?
* @param qualPath1 - fully qualified path
* @param qualPath2 - fully qualified path
- * @return
+ * @return true if both paths are on the same file system, false otherwise.
*/
private static boolean isSameFS(Path qualPath1, Path qualPath2) {
URI srcUri = qualPath1.toUri();
@@ -2299,6 +2324,13 @@ public class FileContext implements PathCapabilities {
/**
* Resolves all symbolic links in the specified path.
* Returns the new path object.
+ *
+ * @param f the path.
+ * @throws FileNotFoundException If f does not exist.
+ * @throws UnresolvedLinkException If an unresolved link is encountered.
+ * @throws AccessControlException If access is denied.
+ * @throws IOException If an I/O error occurred.
+ * @return the resolved path.
*/
protected Path resolve(final Path f) throws FileNotFoundException,
UnresolvedLinkException, AccessControlException, IOException {
@@ -2316,6 +2348,7 @@ public class FileContext implements PathCapabilities {
* to, but not including the final path component.
* @param f path to resolve
* @return the new path object.
+ * @throws IOException If an I/O error occurred.
*/
protected Path resolveIntermediate(final Path f) throws IOException {
return new FSLinkResolver() {
@@ -2334,7 +2367,7 @@ public class FileContext implements PathCapabilities {
* @param f
* Path which needs to be resolved
* @return List of AbstractFileSystems accessed in the path
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
Set resolveAbstractFileSystems(final Path f)
throws IOException {
@@ -2395,7 +2428,7 @@ public class FileContext implements PathCapabilities {
* @param p Path for which delegations tokens are requested.
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List> getDelegationTokens(
@@ -2547,7 +2580,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -2566,7 +2599,7 @@ public class FileContext implements PathCapabilities {
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void setXAttr(Path path, final String name, final byte[] value,
final EnumSet flag) throws IOException {
@@ -2591,7 +2624,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public byte[] getXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2614,7 +2647,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attributes
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Map getXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2638,7 +2671,7 @@ public class FileContext implements PathCapabilities {
* @param names XAttr names.
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Map getXAttrs(Path path, final List names)
throws IOException {
@@ -2661,7 +2694,7 @@ public class FileContext implements PathCapabilities {
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void removeXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2685,7 +2718,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attributes
* @return List{@literal <}String{@literal >} of the XAttr names of the
* file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public List listXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
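
A hedged sketch of the extended-attribute calls documented in this block, assuming the default filesystem supports xattrs (for example HDFS; the local filesystem generally does not). The path and attribute name are illustrative:

    import java.nio.charset.StandardCharsets;
    import java.util.List;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class XAttrSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext();
        Path path = new Path("/user/example/data.txt");   // illustrative path

        fc.setXAttr(path, "user.origin", "ingest-job".getBytes(StandardCharsets.UTF_8));
        byte[] value = fc.getXAttr(path, "user.origin");
        System.out.println("user.origin = " + new String(value, StandardCharsets.UTF_8));

        List<String> names = fc.listXAttrs(path);
        System.out.println("xattr names: " + names);

        fc.removeXAttr(path, "user.origin");
      }
    }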
@@ -2802,7 +2835,7 @@ public class FileContext implements PathCapabilities {
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void satisfyStoragePolicy(final Path path)
throws IOException {
@@ -2824,6 +2857,7 @@ public class FileContext implements PathCapabilities {
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException If an I/O error occurred.
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -2841,7 +2875,7 @@ public class FileContext implements PathCapabilities {
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void unsetStoragePolicy(final Path src) throws IOException {
final Path absF = fixRelativePart(src);
@@ -2860,7 +2894,7 @@ public class FileContext implements PathCapabilities {
*
* @param path file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2878,7 +2912,7 @@ public class FileContext implements PathCapabilities {
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Collection extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
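
A hedged sketch of the storage-policy calls documented above, assuming the default filesystem implements them (for example HDFS); the directory and policy name are illustrative:

    import java.util.Collection;
    import org.apache.hadoop.fs.BlockStoragePolicySpi;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class StoragePolicySketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext();
        Path dir = new Path("/user/example/cold-data");    // illustrative path

        Collection<? extends BlockStoragePolicySpi> policies = fc.getAllStoragePolicies();
        for (BlockStoragePolicySpi policy : policies) {
          System.out.println("available policy: " + policy.getName());
        }

        fc.setStoragePolicy(dir, "COLD");                  // a policy name from the list
        System.out.println("current policy: " + fc.getStoragePolicy(dir).getName());
      }
    }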
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index 9260b9a62c6..f50c06cec38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -52,6 +52,7 @@ public class FileEncryptionInfo implements Serializable {
* @param keyName name of the key used for the encryption zone
* @param ezKeyVersionName name of the KeyVersion used to encrypt the
* encrypted data encryption key.
+ * @param version the crypto protocol version used.
*/
public FileEncryptionInfo(final CipherSuite suite,
final CryptoProtocolVersion version, final byte[] edek,
@@ -134,6 +135,8 @@ public class FileEncryptionInfo implements Serializable {
*
* NOTE:
* Currently this method is used by CLI for backward compatibility.
+ *
+ * @return a stable string representation.
*/
public String toStringStable() {
StringBuilder builder = new StringBuilder("{")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
index d7ca8f172f8..fcef578b072 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -116,6 +116,17 @@ public class FileStatus implements Writable, Comparable