From 5998a179b45f76378ad9ef0721f9ace203505ff2 Mon Sep 17 00:00:00 2001
From: Suresh Srinivas
Date: Sat, 25 Aug 2012 01:23:52 +0000
Subject: [PATCH] HDFS-3844. Merge change 1377168 from trunk. Contributed by
 Jing Zhao.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1377176 13f79535-47bb-0310-9956-ffa450edef68
---
 .../tools/RootDocProcessor.java                    |  1 +
 .../org/apache/hadoop/conf/Configuration.java      |  2 +
 .../org/apache/hadoop/conf/Configured.java         |  2 +
 .../hadoop/conf/ReconfigurationServlet.java        | 11 ------
 .../apache/hadoop/fs/AbstractFileSystem.java       |  1 -
 .../org/apache/hadoop/fs/AvroFSInput.java          |  5 +++
 .../org/apache/hadoop/fs/BlockLocation.java        |  1 +
 .../hadoop/fs/BufferedFSInputStream.java           |  8 +++-
 .../apache/hadoop/fs/ChecksumFileSystem.java       | 18 +++++++--
 .../java/org/apache/hadoop/fs/ChecksumFs.java      | 10 ++++-
 .../org/apache/hadoop/fs/ContentSummary.java       |  6 +--
 .../main/java/org/apache/hadoop/fs/DF.java         |  1 +
 .../main/java/org/apache/hadoop/fs/DU.java         |  4 ++
 .../apache/hadoop/fs/FSDataInputStream.java        |  7 ++++
 .../org/apache/hadoop/fs/FSInputChecker.java       |  7 ++++
 .../org/apache/hadoop/fs/FSInputStream.java        |  6 +++
 .../org/apache/hadoop/fs/FSOutputSummer.java       |  2 +
 .../org/apache/hadoop/fs/FileChecksum.java         |  3 +-
 .../org/apache/hadoop/fs/FileContext.java          | 26 +++++++++++++
 .../java/org/apache/hadoop/fs/FileStatus.java      |  5 +++
 .../java/org/apache/hadoop/fs/FileSystem.java      | 12 ++++--
 .../java/org/apache/hadoop/fs/FileUtil.java        |  2 +
 .../apache/hadoop/fs/FilterFileSystem.java         | 38 +++++++++++++------
 .../java/org/apache/hadoop/fs/FilterFs.java        |  3 --
 .../apache/hadoop/fs/FsServerDefaults.java         |  3 ++
 .../java/org/apache/hadoop/fs/FsShell.java         |  1 +
 .../java/org/apache/hadoop/fs/FsStatus.java        |  2 +
 .../org/apache/hadoop/fs/FsUrlConnection.java      |  1 -
 .../hadoop/fs/FsUrlStreamHandlerFactory.java       |  1 +
 .../java/org/apache/hadoop/fs/GlobFilter.java      |  2 +
 .../org/apache/hadoop/fs/HarFileSystem.java        | 26 +++++++++++++
 .../org/apache/hadoop/fs/LocalFileSystem.java      |  1 +
 .../apache/hadoop/fs/LocatedFileStatus.java        |  3 ++
 .../hadoop/fs/MD5MD5CRC32FileChecksum.java         | 20 +++++-----
 .../java/org/apache/hadoop/fs/Options.java         |  1 -
 .../main/java/org/apache/hadoop/fs/Path.java       |  4 ++
 .../apache/hadoop/fs/RawLocalFileSystem.java       | 36 +++++++++++++++---
 .../apache/hadoop/fs/TrashPolicyDefault.java       |  1 +
 .../apache/hadoop/fs/ftp/FTPFileSystem.java        |  1 +
 .../apache/hadoop/fs/ftp/FTPInputStream.java       |  9 +++++
 .../org/apache/hadoop/fs/kfs/KFSImpl.java          | 17 +++++++++
 .../apache/hadoop/fs/kfs/KFSInputStream.java       | 10 +++++
 .../apache/hadoop/fs/kfs/KFSOutputStream.java      |  9 ++---
 .../hadoop/fs/permission/FsPermission.java         | 13 ++++---
 .../fs/permission/PermissionStatus.java            |  9 +++--
 .../hadoop/fs/s3/Jets3tFileSystemStore.java        | 14 +++++++
 .../apache/hadoop/fs/s3/MigrationTool.java         |  4 ++
 .../org/apache/hadoop/fs/s3/S3FileSystem.java      |  2 +
 .../s3native/Jets3tNativeFileSystemStore.java      | 12 ++++++
 .../apache/hadoop/fs/shell/CommandFormat.java      |  3 ++
 .../org/apache/hadoop/fs/shell/Delete.java         |  2 +
 .../org/apache/hadoop/fs/shell/Display.java        |  2 +
 .../org/apache/hadoop/fs/shell/FsCommand.java      |  1 +
 .../org/apache/hadoop/fs/shell/PathData.java       |  1 +
 .../hadoop/fs/viewfs/ChRootedFileSystem.java       |  1 +
 .../fs/viewfs/NotInMountpointException.java        |  4 --
 .../hadoop/fs/viewfs/ViewFileSystem.java           |  1 +
 .../hadoop/fs/viewfs/ViewFsFileStatus.java         |  3 +-
 .../hadoop/ha/ActiveStandbyElector.java            |  4 ++
 .../apache/hadoop/ha/HAServiceProtocol.java        |  1 +
 .../java/org/apache/hadoop/ha/NodeFencer.java      |  1 +
 .../apache/hadoop/ha/SshFenceByTcpPort.java        |  2 +
 .../org/apache/hadoop/http/HttpServer.java         |  4 +-
 .../apache/hadoop/io/AbstractMapWritable.java      |  6 ++-
 .../org/apache/hadoop/io/ArrayWritable.java        |  2 +
 .../org/apache/hadoop/io/BooleanWritable.java      |  2 +
 .../org/apache/hadoop/io/ByteWritable.java         |  2 +
 .../org/apache/hadoop/io/BytesWritable.java        |  4 ++
 .../apache/hadoop/io/CompressedWritable.java       |  2 +
 .../apache/hadoop/io/DataInputByteBuffer.java      |  2 -
 .../apache/hadoop/io/DefaultStringifier.java       |  3 ++
 .../org/apache/hadoop/io/DoubleWritable.java       |  2 +
 .../org/apache/hadoop/io/EnumSetWritable.java      | 15 ++++----
 .../org/apache/hadoop/io/FloatWritable.java        |  2 +
 .../org/apache/hadoop/io/GenericWritable.java      |  5 +++
 .../java/org/apache/hadoop/io/IOUtils.java         |  2 +
 .../org/apache/hadoop/io/IntWritable.java          |  2 +
 .../org/apache/hadoop/io/LongWritable.java         |  8 ++++
 .../java/org/apache/hadoop/io/MD5Hash.java         |  8 ++++
 .../java/org/apache/hadoop/io/MapFile.java         |  2 +
 .../org/apache/hadoop/io/MapWritable.java          | 30 +++++++--------
 .../org/apache/hadoop/io/NullWritable.java         |  4 ++
 .../org/apache/hadoop/io/ObjectWritable.java       |  7 ++++
 .../org/apache/hadoop/io/OutputBuffer.java         |  1 +
 .../org/apache/hadoop/io/ReadaheadPool.java        |  1 +
 .../org/apache/hadoop/io/SecureIOUtils.java        |  1 -
 .../org/apache/hadoop/io/SequenceFile.java         | 32 ++++++++++++++++
 .../java/org/apache/hadoop/io/SetFile.java         |  1 +
 .../apache/hadoop/io/SortedMapWritable.java        | 38 +++++++++----------
 .../org/apache/hadoop/io/Stringifier.java          |  1 +
 .../main/java/org/apache/hadoop/io/Text.java       |  8 ++++
 .../apache/hadoop/io/TwoDArrayWritable.java        |  2 +
 .../main/java/org/apache/hadoop/io/UTF8.java       |  2 +
 .../org/apache/hadoop/io/VIntWritable.java         |  2 +
 .../org/apache/hadoop/io/VLongWritable.java        |  2 +
 .../hadoop/io/VersionMismatchException.java        |  1 +
 .../apache/hadoop/io/VersionedWritable.java        |  2 +
 .../apache/hadoop/io/WritableComparator.java       |  2 +
 .../apache/hadoop/io/compress/BZip2Codec.java      | 20 ++++++++++
 .../io/compress/BlockCompressorStream.java         |  3 ++
 .../io/compress/BlockDecompressorStream.java       |  3 ++
 .../io/compress/CompressionCodecFactory.java       |  1 +
 .../io/compress/CompressionInputStream.java        |  5 +++
 .../io/compress/CompressionOutputStream.java       |  3 ++
 .../hadoop/io/compress/CompressorStream.java       |  5 +++
 .../io/compress/DecompressorStream.java            |  9 +++++
 .../hadoop/io/compress/DefaultCodec.java           | 11 ++++++
 .../apache/hadoop/io/compress/GzipCodec.java       | 19 ++++++++--
 .../io/compress/bzip2/CBZip2InputStream.java       |  3 ++
 .../io/compress/bzip2/CBZip2OutputStream.java      |  5 +++
 .../io/compress/lz4/Lz4Decompressor.java           |  1 +
 .../compress/snappy/SnappyDecompressor.java        |  1 +
 .../zlib/BuiltInGzipDecompressor.java              | 13 +++++--
 .../io/compress/zlib/BuiltInZlibDeflater.java      |  1 +
 .../io/compress/zlib/BuiltInZlibInflater.java      |  1 +
 .../io/compress/zlib/ZlibCompressor.java           | 10 +++++
 .../io/compress/zlib/ZlibDecompressor.java         | 10 +++++
 .../apache/hadoop/io/file/tfile/BCFile.java        |  4 ++
 .../hadoop/io/file/tfile/CompareUtils.java         |  1 +
 .../apache/hadoop/io/file/tfile/TFile.java         |  3 ++
 .../apache/hadoop/io/nativeio/NativeIO.java        |  1 +
 .../hadoop/io/nativeio/NativeIOException.java      |  1 +
 .../apache/hadoop/io/retry/RetryPolicies.java      |  4 ++
 .../io/serializer/DeserializerComparator.java      |  1 +
 .../io/serializer/JavaSerialization.java           | 12 ++++--
 .../JavaSerializationComparator.java               |  1 +
 .../io/serializer/WritableSerialization.java       |  2 -
 .../io/serializer/avro/AvroSerialization.java      |  2 +
 .../java/org/apache/hadoop/ipc/Client.java         |  4 ++
 .../apache/hadoop/ipc/ProtobufRpcEngine.java       |  1 +
 .../org/apache/hadoop/ipc/ProtocolProxy.java       |  1 -
 .../apache/hadoop/ipc/ProtocolSignature.java       |  3 +-
 .../java/org/apache/hadoop/ipc/Server.java         |  1 +
 .../apache/hadoop/ipc/WritableRpcEngine.java       |  7 ++++
 .../java/org/apache/hadoop/log/LogLevel.java       |  1 +
 .../metrics/ganglia/GangliaContext.java            |  1 -
 .../hadoop/metrics/spi/CompositeContext.java       |  1 -
 .../spi/NullContextWithUpdateThread.java           |  1 -
 .../hadoop/metrics/spi/OutputRecord.java           |  4 --
 .../org/apache/hadoop/metrics/spi/Util.java        |  1 -
 .../hadoop/metrics/util/MetricsIntValue.java       |  2 -
 .../metrics/util/MetricsTimeVaryingInt.java        |  2 -
 .../metrics/util/MetricsTimeVaryingLong.java       |  2 -
 .../metrics/util/MetricsTimeVaryingRate.java       |  2 -
 .../hadoop/metrics2/impl/MetricsConfig.java        |  1 -
 .../apache/hadoop/metrics2/sink/FileSink.java      |  1 -
 .../hadoop/metrics2/source/JvmMetrics.java         |  1 -
 .../net/AbstractDNSToSwitchMapping.java            |  1 -
 .../main/java/org/apache/hadoop/net/DNS.java       |  1 -
 .../apache/hadoop/net/NetworkTopology.java         |  1 +
 .../apache/hadoop/net/ScriptBasedMapping.java      |  1 -
 .../hadoop/net/SocketIOWithTimeout.java            |  1 -
 .../apache/hadoop/net/SocketInputStream.java       |  5 +++
 .../apache/hadoop/net/SocketOutputStream.java      |  6 +++
 .../apache/hadoop/net/SocksSocketFactory.java      | 11 +----
 .../hadoop/net/StandardSocketFactory.java          |  7 ----
 .../org/apache/hadoop/net/TableMapping.java        |  1 +
 .../hadoop/record/BinaryRecordInput.java           | 17 +++++++++
 .../hadoop/record/BinaryRecordOutput.java          | 15 ++++++++
 .../java/org/apache/hadoop/record/Buffer.java      |  5 +++
 .../apache/hadoop/record/CsvRecordInput.java       | 16 ++++++++
 .../apache/hadoop/record/CsvRecordOutput.java      | 14 +++++++
 .../java/org/apache/hadoop/record/Record.java      |  4 ++
 .../hadoop/record/RecordComparator.java            |  1 +
 .../apache/hadoop/record/XmlRecordInput.java       | 21 ++++++++++
 .../apache/hadoop/record/XmlRecordOutput.java      | 14 +++++++
 .../hadoop/record/compiler/CGenerator.java         |  1 +
 .../hadoop/record/compiler/CodeBuffer.java         |  1 +
 .../apache/hadoop/record/compiler/Consts.java      |  4 --
 .../hadoop/record/compiler/CppGenerator.java       |  1 +
 .../hadoop/record/compiler/JBoolean.java           |  7 ++++
 .../hadoop/record/compiler/JBuffer.java            |  9 +++++
 .../apache/hadoop/record/compiler/JByte.java       |  5 +++
 .../hadoop/record/compiler/JCompType.java          |  5 +++
 .../hadoop/record/compiler/JDouble.java            |  6 +++
 .../apache/hadoop/record/compiler/JFloat.java      |  6 +++
 .../apache/hadoop/record/compiler/JInt.java        |  5 +++
 .../apache/hadoop/record/compiler/JLong.java       |  6 +++
 .../apache/hadoop/record/compiler/JMap.java        | 10 +++++
 .../hadoop/record/compiler/JRecord.java            |  9 +++++
 .../hadoop/record/compiler/JString.java            |  6 +++
 .../hadoop/record/compiler/JVector.java            | 10 +++++
 .../hadoop/record/compiler/JavaGenerator.java      |  1 +
 .../hadoop/record/compiler/ant/RccTask.java        |  1 +
 .../compiler/generated/ParseException.java         |  1 +
 .../hadoop/record/compiler/generated/Rcc.java      |  1 -
 .../compiler/generated/RccTokenManager.java        |  8 ----
 .../record/compiler/generated/Token.java           |  1 +
 .../compiler/generated/TokenMgrError.java          |  1 +
 .../hadoop/record/meta/FieldTypeInfo.java          |  2 +
 .../apache/hadoop/record/meta/MapTypeID.java       |  5 ++-
 .../hadoop/record/meta/RecordTypeInfo.java         |  3 ++
 .../hadoop/record/meta/StructTypeID.java           |  3 ++
 .../org/apache/hadoop/record/meta/TypeID.java      |  2 +
 .../hadoop/record/meta/VectorTypeID.java           |  3 ++
 .../security/RefreshUserMappingsProtocol.java      |  1 -
 .../hadoop/security/SaslInputStream.java           |  7 ++++
 .../hadoop/security/SaslOutputStream.java          |  7 +++-
 .../apache/hadoop/security/SaslRpcClient.java      |  1 +
 .../apache/hadoop/security/SaslRpcServer.java      |  2 -
 .../apache/hadoop/security/SecurityUtil.java       |  2 +
 .../security/ShellBasedUnixGroupsMapping.java      |  3 --
 .../ShellBasedUnixGroupsNetgroupMapping.java       |  6 ---
 .../hadoop/security/UserGroupInformation.java      |  4 +-
 .../security/authorize/AccessControlList.java      |  3 ++
 .../security/authorize/PolicyProvider.java         |  1 +
 .../RefreshAuthorizationPolicyProtocol.java        |  1 -
 .../ssl/FileBasedKeyStoresFactory.java             |  1 +
 .../security/ssl/SSLHostnameVerifier.java          | 17 +++++++++
 .../apache/hadoop/security/token/Token.java        |  4 +-
 .../AbstractDelegationTokenIdentifier.java         |  8 +++-
 .../AbstractDelegationTokenSecretManager.java      |  1 +
 .../token/delegation/DelegationKey.java            |  2 +
 .../hadoop/tools/GetUserMappingsProtocol.java      |  1 -
 .../apache/hadoop/util/AsyncDiskService.java       |  1 +
 .../org/apache/hadoop/util/DataChecksum.java       |  8 ++++
 .../java/org/apache/hadoop/util/HeapSort.java      |  5 +--
 .../java/org/apache/hadoop/util/Progress.java      |  1 +
 .../org/apache/hadoop/util/PureJavaCrc32.java      |  8 ++--
 .../apache/hadoop/util/PureJavaCrc32C.java         |  8 ++--
 .../org/apache/hadoop/util/QuickSort.java          |  5 +--
 .../apache/hadoop/util/ReflectionUtils.java        |  1 +
 .../java/org/apache/hadoop/util/Shell.java         |  4 +-
 .../org/apache/hadoop/util/bloom/Filter.java       |  2 +
 .../org/apache/hadoop/util/bloom/Key.java          |  2 +
 .../apache/hadoop/util/hash/JenkinsHash.java       |  1 +
 .../apache/hadoop/util/hash/MurmurHash.java        |  1 +
 .../apache/hadoop/cli/util/CLICommand.java         |  1 +
 .../apache/hadoop/cli/util/CLITestCmd.java         |  6 +++
 .../apache/hadoop/cli/util/FSCmdExecutor.java      |  1 +
 .../apache/hadoop/conf/TestConfServlet.java        |  1 -
 .../apache/hadoop/conf/TestConfiguration.java      |  2 -
 .../conf/TestConfigurationDeprecation.java         |  2 -
 .../hadoop/conf/TestDeprecatedKeys.java            |  3 --
 .../hadoop/conf/TestReconfiguration.java           |  6 ---
 .../hadoop/fs/FSMainOperationsBaseTest.java        |  2 +
 .../fs/FileContextMainOperationsBaseTest.java      |  2 +
 .../hadoop/fs/FileContextPermissionBase.java       |  1 +
 .../apache/hadoop/fs/FileContextURIBase.java       |  2 -
 .../org/apache/hadoop/fs/TestAvroFSInput.java      |  1 -
 .../java/org/apache/hadoop/fs/TestDU.java          |  2 +
 .../TestFSMainOperationsLocalFileSystem.java       |  3 ++
 .../hadoop/fs/TestFcLocalFsPermission.java         |  2 +
 .../apache/hadoop/fs/TestFcLocalFsUtil.java        |  1 +
 .../hadoop/fs/TestFileSystemCaching.java           | 10 +++++
 .../org/apache/hadoop/fs/TestFsOptions.java        |  2 -
 .../org/apache/hadoop/fs/TestListFiles.java        |  1 -
 .../fs/TestLocalFSFileContextCreateMkdir.java      |  1 +
 .../TestLocalFSFileContextMainOperations.java      |  2 +
 .../fs/TestLocalFSFileContextSymlink.java          |  5 +++
 .../hadoop/fs/TestLocalFsFCStatistics.java         |  3 ++
 .../hadoop/fs/TestLocal_S3FileContextURI.java      |  1 +
 .../hadoop/fs/TestS3_LocalFileContextURI.java      |  1 +
 .../java/org/apache/hadoop/fs/TestTrash.java       |  2 +
 .../hadoop/fs/kfs/KFSEmulationImpl.java            | 17 +++++++++
 .../hadoop/fs/kfs/TestKosmosFileSystem.java        |  8 +---
 .../fs/loadGenerator/DataGenerator.java            |  1 +
 .../fs/loadGenerator/LoadGenerator.java            |  2 +
 .../fs/loadGenerator/StructureGenerator.java       |  1 +
 .../hadoop/fs/s3/InMemoryFileSystemStore.java      | 14 +++++++
 .../InMemoryNativeFileSystemStore.java             | 12 ++++++
 .../fs/viewfs/TestChRootedFileSystem.java          |  1 +
 .../TestFSMainOperationsLocalFileSystem.java       |  2 +
 .../fs/viewfs/TestFcCreateMkdirLocalFs.java        |  2 +
 .../viewfs/TestFcMainOperationsLocalFs.java        |  2 +
 .../fs/viewfs/TestFcPermissionsLocalFs.java        |  2 +
 ...tViewFileSystemDelegationTokenSupport.java      |  2 +
 .../TestViewFileSystemLocalFileSystem.java         |  2 +
 ...ileSystemWithAuthorityLocalFileSystem.java      |  3 ++
 .../hadoop/fs/viewfs/TestViewFsLocalFs.java        |  2 +
 .../hadoop/fs/viewfs/TestViewFsTrash.java          |  2 +-
 .../TestViewFsWithAuthorityLocalFs.java            |  3 ++
 .../fs/viewfs/TestViewfsFileStatus.java            |  1 -
 .../apache/hadoop/ha/ClientBaseWithFixes.java      |  3 +-
 .../org/apache/hadoop/ha/DummyHAService.java       |  1 +
 .../apache/hadoop/http/TestGlobalFilter.java       |  4 ++
 .../apache/hadoop/http/TestPathFilter.java         |  4 ++
 .../apache/hadoop/http/TestServletFilter.java      |  6 ++-
 .../org/apache/hadoop/io/AvroTestUtil.java         |  2 -
 .../org/apache/hadoop/io/RandomDatum.java          |  3 ++
 .../apache/hadoop/io/TestEnumSetWritable.java      |  8 ----
 .../apache/hadoop/io/TestGenericWritable.java      |  6 +++
 .../org/apache/hadoop/io/TestMD5Hash.java          |  2 +
 .../apache/hadoop/io/TestSecureIOUtils.java        |  3 --
 .../apache/hadoop/io/TestSequenceFile.java         |  2 +
 .../java/org/apache/hadoop/io/TestText.java        |  1 +
 .../hadoop/io/TestVersionedWritable.java           |  8 ++++
 .../org/apache/hadoop/io/TestWritable.java         |  3 ++
 .../apache/hadoop/io/TestWritableName.java         |  3 ++
 .../hadoop/io/compress/TestCodecFactory.java       | 13 +++++++
 .../hadoop/io/file/tfile/NanoTimer.java            |  1 +
 .../io/file/tfile/TestTFileByteArrays.java         |  1 -
 ...eNoneCodecsJClassComparatorByteArrays.java      |  3 --
 .../tfile/TestTFileSeqFileComparison.java          | 16 ++++++++
 .../hadoop/io/nativeio/TestNativeIO.java           |  1 +
 .../hadoop/io/retry/TestFailoverProxy.java         |  1 +
 .../io/retry/UnreliableImplementation.java         |  7 +++-
 .../hadoop/io/serializer/avro/Record.java          |  2 +
 .../avro/TestAvroSerialization.java                |  4 ++
 .../apache/hadoop/ipc/MiniRPCBenchmark.java        |  2 +
 .../java/org/apache/hadoop/ipc/TestIPC.java        |  3 ++
 .../java/org/apache/hadoop/ipc/TestRPC.java        | 14 +++++++
 .../hadoop/ipc/TestRPCCompatibility.java           |  1 +
 .../org/apache/hadoop/ipc/TestSaslRPC.java         |  6 +++
 .../hadoop/metrics/TestMetricsServlet.java         |  1 +
 .../metrics2/lib/TestMetricsAnnotations.java       |  4 +-
 .../metrics2/lib/TestMetricsRegistry.java          |  3 ++
 .../org/apache/hadoop/net/StaticMapping.java       |  2 -
 .../org/apache/hadoop/record/FromCpp.java          |  2 +
 .../org/apache/hadoop/record/RecordBench.java      |  2 -
 .../apache/hadoop/record/TestRecordIO.java         |  2 +
 .../hadoop/record/TestRecordVersioning.java        |  2 +
 .../java/org/apache/hadoop/record/ToCpp.java       |  2 +
 .../security/TestAuthenticationFilter.java         |  2 +-
 .../hadoop/security/TestCredentials.java           |  3 --
 .../security/TestDoAsEffectiveUser.java            | 10 ++++-
 .../hadoop/security/TestGroupsCaching.java         |  3 ++
 .../hadoop/security/TestJNIGroupsMapping.java      |  5 ---
 .../security/TestUserGroupInformation.java         |  6 +++
 .../authorize/TestAccessControlList.java           |  3 --
 .../hadoop/security/token/TestToken.java           |  2 -
 .../token/delegation/TestDelegationToken.java      |  9 ++++-
 .../apache/hadoop/test/GenericTestUtils.java       |  1 +
 .../apache/hadoop/test/MetricsAsserts.java         |  1 -
 .../hadoop/test/MultithreadedTestUtil.java         |  2 +
 .../org/apache/hadoop/util/JarFinder.java          |  1 -
 .../apache/hadoop/util/TestIndexedSort.java        |  7 +++-
 .../org/apache/hadoop/util/TestOptions.java        |  1 -
 .../apache/hadoop/util/TestPureJavaCrc32.java      |  2 +-
 .../hadoop/util/TestReflectionUtils.java           |  3 +-
 .../org/apache/hadoop/util/TestRunJar.java         |  2 +
 .../org/apache/hadoop/util/TestShell.java          |  3 +-
 .../fs/http/client/HttpFSFileSystem.java           |  1 -
 .../client/HttpFSKerberosAuthenticator.java        |  3 --
 .../HttpFSKerberosAuthenticationHandler.java       |  3 --
 .../http/server/HttpFSParametersProvider.java      |  1 -
 .../hadoop/lib/lang/RunnableCallable.java          |  1 +
.../hadoop/FileSystemAccessService.java | 2 + .../InstrumentationService.java | 5 +++ .../service/scheduler/SchedulerService.java | 1 + .../hadoop/lib/servlet/ServerWebApp.java | 2 + .../apache/hadoop/lib/wsrs/BooleanParam.java | 1 + .../org/apache/hadoop/lib/wsrs/ByteParam.java | 1 + .../org/apache/hadoop/lib/wsrs/EnumParam.java | 1 + .../apache/hadoop/lib/wsrs/IntegerParam.java | 1 + .../org/apache/hadoop/lib/wsrs/LongParam.java | 1 + .../org/apache/hadoop/lib/wsrs/Param.java | 1 + .../apache/hadoop/lib/wsrs/ShortParam.java | 1 + .../apache/hadoop/lib/wsrs/StringParam.java | 2 + .../TestHttpFSFileSystemLocalFileSystem.java | 3 ++ .../TestHttpFSWithHttpFSFileSystem.java | 4 ++ ...stHttpFSKerberosAuthenticationHandler.java | 1 - .../apache/hadoop/test/TestHFSTestCase.java | 3 ++ .../org/apache/hadoop/test/TestHTestCase.java | 3 ++ .../apache/hadoop/test/TestHdfsHelper.java | 1 + hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++ .../main/java/org/apache/hadoop/fs/Hdfs.java | 3 -- .../apache/hadoop/hdfs/HsftpFileSystem.java | 1 - .../hadoop/hdfs/RemoteBlockReader2.java | 4 -- .../token/block/BlockTokenIdentifier.java | 2 - .../server/blockmanagement/BlockManager.java | 1 - .../blockmanagement/DatanodeManager.java | 1 - .../blockmanagement/HeartbeatManager.java | 1 - .../PendingDataNodeMessages.java | 4 -- .../hadoop/hdfs/server/common/JspHelper.java | 3 -- .../datanode/SecureDataNodeStarter.java | 2 - .../server/namenode/EditLogInputStream.java | 3 -- .../hdfs/server/namenode/FSEditLogLoader.java | 2 - .../hdfs/server/namenode/FSEditLogOp.java | 1 - .../hdfs/server/namenode/FSImageFormat.java | 1 - .../server/namenode/FSImageSerialization.java | 1 - .../server/namenode/NameNodeHttpServer.java | 6 --- .../server/namenode/SerialNumberManager.java | 2 +- .../StatisticsEditsVisitor.java | 1 - .../ImageLoaderCurrent.java | 2 - .../hadoop/hdfs/util/CyclicIteration.java | 3 -- .../hadoop/fi/DataTransferTestUtil.java | 1 - .../apache/hadoop/hdfs/PipelinesTestUtil.java | 3 -- .../org/apache/hadoop/hdfs/TestDFSMkdirs.java | 3 -- .../TestSecondaryNameNodeUpgrade.java | 6 --- 380 files changed, 1289 insertions(+), 375 deletions(-) diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java index 2783bf3b308..a6ce035fa9b 100644 --- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java +++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java @@ -97,6 +97,7 @@ class RootDocProcessor { this.target = target; } + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { String methodName = method.getName(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 516ca1c8e12..055780e36d3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -1809,6 +1809,7 @@ public class Configuration implements Iterable>, * * @return an iterator over the entries. */ + @Override public Iterator> iterator() { // Get a copy of just the string to string pairs. 
After the old object // methods that allow non-strings to be put into configurations are removed, @@ -2234,6 +2235,7 @@ public class Configuration implements Iterable>, } //@Override + @Override public void write(DataOutput out) throws IOException { Properties props = getProps(); WritableUtils.writeVInt(out, props.size()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java index 2bc7e537e4e..f06af2b98df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java @@ -39,11 +39,13 @@ public class Configured implements Configurable { } // inherit javadoc + @Override public void setConf(Configuration conf) { this.conf = conf; } // inherit javadoc + @Override public Configuration getConf() { return conf; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java index 041b263edd9..452d29f7b7a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java @@ -23,12 +23,10 @@ import org.apache.commons.logging.*; import org.apache.commons.lang.StringEscapeUtils; import java.util.Collection; -import java.util.Map; import java.util.Enumeration; import java.io.IOException; import java.io.PrintWriter; -import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; @@ -57,9 +55,6 @@ public class ReconfigurationServlet extends HttpServlet { public static final String CONF_SERVLET_RECONFIGURABLE_PREFIX = "conf.servlet.reconfigurable."; - /** - * {@inheritDoc} - */ @Override public void init() throws ServletException { super.init(); @@ -202,9 +197,6 @@ public class ReconfigurationServlet extends HttpServlet { } } - /** - * {@inheritDoc} - */ @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { @@ -219,9 +211,6 @@ public class ReconfigurationServlet extends HttpServlet { printFooter(out); } - /** - * {@inheritDoc} - */ @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java index d9eda445800..6adbeab60a0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java @@ -47,7 +47,6 @@ import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.Progressable; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java index a319fb7b364..b4a4a85674d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java @@ -45,22 +45,27 @@ public class AvroFSInput implements Closeable, SeekableInput { this.stream = fc.open(p); } + @Override public long length() { return len; } + @Override public int read(byte[] b, int off, int len) throws IOException { return stream.read(b, off, len); } + @Override public void seek(long p) throws IOException { stream.seek(p); } + @Override public long tell() throws IOException { return stream.getPos(); } + @Override public void close() throws IOException { stream.close(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java index cfe9ee8c660..fa095343c51 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java @@ -204,6 +204,7 @@ public class BlockLocation { } } + @Override public String toString() { StringBuilder result = new StringBuilder(); result.append(offset); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java index f3229240125..745148281d2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java @@ -19,7 +19,6 @@ package org.apache.hadoop.fs; import java.io.BufferedInputStream; import java.io.FileDescriptor; -import java.io.FileInputStream; import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; @@ -50,10 +49,12 @@ implements Seekable, PositionedReadable, HasFileDescriptor { super(in, size); } + @Override public long getPos() throws IOException { return ((FSInputStream)in).getPos()-(count-pos); } + @Override public long skip(long n) throws IOException { if (n <= 0) { return 0; @@ -63,6 +64,7 @@ implements Seekable, PositionedReadable, HasFileDescriptor { return n; } + @Override public void seek(long pos) throws IOException { if( pos<0 ) { return; @@ -82,20 +84,24 @@ implements Seekable, PositionedReadable, HasFileDescriptor { ((FSInputStream)in).seek(pos); } + @Override public boolean seekToNewSource(long targetPos) throws IOException { pos = 0; count = 0; return ((FSInputStream)in).seekToNewSource(targetPos); } + @Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { return ((FSInputStream)in).read(position, buffer, offset, length) ; } + @Override public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { ((FSInputStream)in).readFully(position, buffer, offset, length); } + @Override public void readFully(long position, byte[] buffer) throws IOException { ((FSInputStream)in).readFully(position, buffer); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java index 17707718b82..42ee8702688 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java @@ -53,6 +53,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { super(fs); } + @Override public void setConf(Configuration conf) { super.setConf(conf); if (conf != null) { @@ -64,6 +65,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { /** * Set whether to verify checksum. */ + @Override public void setVerifyChecksum(boolean verifyChecksum) { this.verifyChecksum = verifyChecksum; } @@ -74,6 +76,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { } /** get the raw file system */ + @Override public FileSystem getRawFileSystem() { return fs; } @@ -162,14 +165,17 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { return HEADER_LENGTH + 4*(dataPos/bytesPerSum); } + @Override protected long getChunkPosition( long dataPos ) { return dataPos/bytesPerSum*bytesPerSum; } + @Override public int available() throws IOException { return datas.available() + super.available(); } + @Override public int read(long position, byte[] b, int off, int len) throws IOException { // parameter check @@ -190,6 +196,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { return nread; } + @Override public void close() throws IOException { datas.close(); if( sums != null ) { @@ -290,6 +297,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { * @exception IOException if an I/O error occurs. * ChecksumException if the chunk to skip to is corrupted */ + @Override public synchronized long skip(long n) throws IOException { long curPos = getPos(); long fileLength = getFileLength(); @@ -311,6 +319,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { * ChecksumException if the chunk to seek to is corrupted */ + @Override public synchronized void seek(long pos) throws IOException { if(pos>getFileLength()) { throw new IOException("Cannot seek after EOF"); @@ -339,7 +348,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { return new FSDataBoundedInputStream(fs, f, in); } - /** {@inheritDoc} */ + @Override public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { throw new IOException("Not supported"); @@ -398,6 +407,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { sums.writeInt(bytesPerSum); } + @Override public void close() throws IOException { flushBuffer(); sums.close(); @@ -412,7 +422,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { } } - /** {@inheritDoc} */ @Override public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, @@ -454,7 +463,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { return out; } - /** {@inheritDoc} */ @Override public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, @@ -472,6 +480,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { * @return true if successful; * false if file does not exist or is a directory */ + @Override public boolean setReplication(Path src, short replication) throws IOException { boolean value = fs.setReplication(src, replication); if (!value) @@ -487,6 +496,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem 
{ /** * Rename files/dirs */ + @Override public boolean rename(Path src, Path dst) throws IOException { if (fs.isDirectory(src)) { return fs.rename(src, dst); @@ -516,6 +526,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { * Implement the delete(Path, boolean) in checksum * file system. */ + @Override public boolean delete(Path f, boolean recursive) throws IOException{ FileStatus fstatus = null; try { @@ -538,6 +549,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem { } final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { return !isChecksumFile(file); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java index 47849919827..12805d86a6b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java @@ -32,7 +32,6 @@ import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.PureJavaCrc32; -import org.apache.hadoop.util.StringUtils; /** * Abstract Checksumed Fs. @@ -61,6 +60,7 @@ public abstract class ChecksumFs extends FilterFs { /** * Set whether to verify checksum. */ + @Override public void setVerifyChecksum(boolean inVerifyChecksum) { this.verifyChecksum = inVerifyChecksum; } @@ -152,14 +152,17 @@ public abstract class ChecksumFs extends FilterFs { return HEADER_LENGTH + 4*(dataPos/bytesPerSum); } + @Override protected long getChunkPosition(long dataPos) { return dataPos/bytesPerSum*bytesPerSum; } + @Override public int available() throws IOException { return datas.available() + super.available(); } + @Override public int read(long position, byte[] b, int off, int len) throws IOException, UnresolvedLinkException { // parameter check @@ -180,6 +183,7 @@ public abstract class ChecksumFs extends FilterFs { return nread; } + @Override public void close() throws IOException { datas.close(); if (sums != null) { @@ -258,6 +262,7 @@ public abstract class ChecksumFs extends FilterFs { * @exception IOException if an I/O error occurs. * ChecksumException if the chunk to skip to is corrupted */ + @Override public synchronized long skip(long n) throws IOException { final long curPos = getPos(); final long fileLength = getFileLength(); @@ -279,6 +284,7 @@ public abstract class ChecksumFs extends FilterFs { * ChecksumException if the chunk to seek to is corrupted */ + @Override public synchronized void seek(long pos) throws IOException { if (pos>getFileLength()) { throw new IOException("Cannot seek after EOF"); @@ -348,6 +354,7 @@ public abstract class ChecksumFs extends FilterFs { sums.writeInt(bytesPerSum); } + @Override public void close() throws IOException { flushBuffer(); sums.close(); @@ -447,6 +454,7 @@ public abstract class ChecksumFs extends FilterFs { * Implement the delete(Path, boolean) in checksum * file system. 
*/ + @Override public boolean delete(Path f, boolean recursive) throws IOException, UnresolvedLinkException { FileStatus fstatus = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java index c0ab82de1dd..0d685b43e1f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java @@ -75,7 +75,7 @@ public class ContentSummary implements Writable{ /** Returns (disk) space quota */ public long getSpaceQuota() {return spaceQuota;} - /** {@inheritDoc} */ + @Override @InterfaceAudience.Private public void write(DataOutput out) throws IOException { out.writeLong(length); @@ -86,7 +86,7 @@ public class ContentSummary implements Writable{ out.writeLong(spaceQuota); } - /** {@inheritDoc} */ + @Override @InterfaceAudience.Private public void readFields(DataInput in) throws IOException { this.length = in.readLong(); @@ -131,7 +131,7 @@ public class ContentSummary implements Writable{ return qOption ? QUOTA_HEADER : HEADER; } - /** {@inheritDoc} */ + @Override public String toString() { return toString(true); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java index 9949834222e..c552f331f88 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java @@ -131,6 +131,7 @@ public class DF extends Shell { return mount; } + @Override public String toString() { return "df -k " + mount +"\n" + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java index 5caec7204d6..2c96b0abaf0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java @@ -76,6 +76,7 @@ public class DU extends Shell { **/ class DURefreshThread implements Runnable { + @Override public void run() { while(shouldRun) { @@ -169,16 +170,19 @@ public class DU extends Shell { } } + @Override public String toString() { return "du -sk " + dirPath +"\n" + used + "\t" + dirPath; } + @Override protected String[] getExecString() { return new String[] {"du", "-sk", dirPath}; } + @Override protected void parseExecResult(BufferedReader lines) throws IOException { String line = lines.readLine(); if (line == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java index e47dffb082c..eef53140c33 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java @@ -44,6 +44,7 @@ public class FSDataInputStream extends DataInputStream * * @param desired offset to seek to */ + @Override public synchronized void seek(long desired) throws IOException { ((Seekable)in).seek(desired); } @@ -53,6 +54,7 @@ public class FSDataInputStream extends DataInputStream * * @return current position in the input stream */ + @Override public long 
getPos() throws IOException { return ((Seekable)in).getPos(); } @@ -68,6 +70,7 @@ public class FSDataInputStream extends DataInputStream * if there is no more data because the end of the stream has been * reached */ + @Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { return ((PositionedReadable)in).read(position, buffer, offset, length); @@ -85,6 +88,7 @@ public class FSDataInputStream extends DataInputStream * If an exception is thrown an undetermined number * of bytes in the buffer may have been written. */ + @Override public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { ((PositionedReadable)in).readFully(position, buffer, offset, length); @@ -93,6 +97,7 @@ public class FSDataInputStream extends DataInputStream /** * See {@link #readFully(long, byte[], int, int)}. */ + @Override public void readFully(long position, byte[] buffer) throws IOException { ((PositionedReadable)in).readFully(position, buffer, 0, buffer.length); @@ -104,6 +109,7 @@ public class FSDataInputStream extends DataInputStream * @param targetPos position to seek to * @return true if a new source is found, false otherwise */ + @Override public boolean seekToNewSource(long targetPos) throws IOException { return ((Seekable)in).seekToNewSource(targetPos); } @@ -118,6 +124,7 @@ public class FSDataInputStream extends DataInputStream return in; } + @Override public int read(ByteBuffer buf) throws IOException { if (in instanceof ByteBufferReadable) { return ((ByteBufferReadable)in).read(buf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java index 9974f27e247..cc992e7c941 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java @@ -140,6 +140,7 @@ abstract public class FSInputChecker extends FSInputStream { * @exception IOException if an I/O error occurs. */ + @Override public synchronized int read() throws IOException { if (pos >= count) { fill(); @@ -180,6 +181,7 @@ abstract public class FSInputChecker extends FSInputStream { * @exception IOException if an I/O error occurs. * ChecksumException if any checksum error occurs */ + @Override public synchronized int read(byte[] b, int off, int len) throws IOException { // parameter check if ((off | len | (off + len) | (b.length - (off + len))) < 0) { @@ -367,6 +369,7 @@ abstract public class FSInputChecker extends FSInputStream { * @exception IOException if an I/O error occurs. 
* ChecksumException if the chunk to skip to is corrupted */ + @Override public synchronized long skip(long n) throws IOException { if (n <= 0) { return 0; @@ -389,6 +392,7 @@ abstract public class FSInputChecker extends FSInputStream { * ChecksumException if the chunk to seek to is corrupted */ + @Override public synchronized void seek(long pos) throws IOException { if( pos<0 ) { return; @@ -462,13 +466,16 @@ abstract public class FSInputChecker extends FSInputStream { this.pos = 0; } + @Override final public boolean markSupported() { return false; } + @Override final public void mark(int readlimit) { } + @Override final public void reset() throws IOException { throw new IOException("mark/reset not supported"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java index f7bc22159d9..8d668feeaba 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java @@ -36,19 +36,23 @@ public abstract class FSInputStream extends InputStream * The next read() will be from that location. Can't * seek past the end of the file. */ + @Override public abstract void seek(long pos) throws IOException; /** * Return the current offset from the start of the file */ + @Override public abstract long getPos() throws IOException; /** * Seeks a different copy of the data. Returns true if * found a new source, false otherwise. */ + @Override public abstract boolean seekToNewSource(long targetPos) throws IOException; + @Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { synchronized (this) { @@ -64,6 +68,7 @@ public abstract class FSInputStream extends InputStream } } + @Override public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { int nread = 0; @@ -76,6 +81,7 @@ public abstract class FSInputStream extends InputStream } } + @Override public void readFully(long position, byte[] buffer) throws IOException { readFully(position, buffer, 0, buffer.length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java index 66b6a749161..d494f30de78 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java @@ -55,6 +55,7 @@ abstract public class FSOutputSummer extends OutputStream { throws IOException; /** Write one byte */ + @Override public synchronized void write(int b) throws IOException { sum.update(b); buf[count++] = (byte)b; @@ -81,6 +82,7 @@ abstract public class FSOutputSummer extends OutputStream { * @param len the number of bytes to write. * @exception IOException if an I/O error occurs. 
*/ + @Override public synchronized void write(byte b[], int off, int len) throws IOException { if (off < 0 || len < 0 || off > b.length - len) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java index 2b248bdcf26..149a3e3a4a0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java @@ -37,6 +37,7 @@ public abstract class FileChecksum implements Writable { public abstract byte[] getBytes(); /** Return true if both the algorithms and the values are the same. */ + @Override public boolean equals(Object other) { if (other == this) { return true; @@ -50,7 +51,7 @@ public abstract class FileChecksum implements Writable { && Arrays.equals(this.getBytes(), that.getBytes()); } - /** {@inheritDoc} */ + @Override public int hashCode() { return getAlgorithmName().hashCode() ^ Arrays.hashCode(getBytes()); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index 4e5057a4e9b..5cfce9b019c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -190,6 +190,7 @@ public final class FileContext { new FileContextFinalizer(); private static final PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(final Path file) { return true; } @@ -318,6 +319,7 @@ public final class FileContext { throws UnsupportedFileSystemException, IOException { try { return user.doAs(new PrivilegedExceptionAction() { + @Override public AbstractFileSystem run() throws UnsupportedFileSystemException { return AbstractFileSystem.get(uri, conf); } @@ -660,6 +662,7 @@ public final class FileContext { final CreateOpts[] updatedOpts = CreateOpts.setOpt(CreateOpts.perms(permission), opts); return new FSLinkResolver() { + @Override public FSDataOutputStream next(final AbstractFileSystem fs, final Path p) throws IOException { return fs.create(p, createFlag, updatedOpts); @@ -703,6 +706,7 @@ public final class FileContext { final FsPermission absFerms = (permission == null ? 
FsPermission.getDefault() : permission).applyUMask(umask); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.mkdir(p, absFerms, createParent); @@ -738,6 +742,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public Boolean next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return Boolean.valueOf(fs.delete(p, recursive)); @@ -766,6 +771,7 @@ public final class FileContext { FileNotFoundException, UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FSDataInputStream next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.open(p); @@ -796,6 +802,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FSDataInputStream next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.open(p, bufferSize); @@ -826,6 +833,7 @@ public final class FileContext { IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public Boolean next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return Boolean.valueOf(fs.setReplication(p, replication)); @@ -894,6 +902,7 @@ public final class FileContext { */ final Path source = resolveIntermediate(absSrc); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.rename(source, p, options); @@ -925,6 +934,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.setPermission(p, permission); @@ -967,6 +977,7 @@ public final class FileContext { } final Path absF = fixRelativePart(f); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.setOwner(p, username, groupname); @@ -1002,6 +1013,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.setTimes(p, mtime, atime); @@ -1034,6 +1046,7 @@ public final class FileContext { IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FileChecksum next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFileChecksum(p); @@ -1089,6 +1102,7 @@ public final class FileContext { FileNotFoundException, UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FileStatus next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFileStatus(p); @@ -1135,6 +1149,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public 
FileStatus next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { FileStatus fi = fs.getFileLinkStatus(p); @@ -1165,6 +1180,7 @@ public final class FileContext { FileNotFoundException, UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public Path next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { FileStatus fi = fs.getFileLinkStatus(p); @@ -1208,6 +1224,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public BlockLocation[] next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFileBlockLocations(p, start, len); @@ -1246,6 +1263,7 @@ public final class FileContext { } final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FsStatus next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFsStatus(p); @@ -1339,6 +1357,7 @@ public final class FileContext { IOException { final Path nonRelLink = fixRelativePart(link); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { fs.createSymlink(target, p, createParent); @@ -1373,6 +1392,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver>() { + @Override public RemoteIterator next( final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { @@ -1432,6 +1452,7 @@ public final class FileContext { UnsupportedFileSystemException, IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver>() { + @Override public RemoteIterator next( final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { @@ -1703,6 +1724,7 @@ public final class FileContext { IOException { final Path absF = fixRelativePart(f); return new FSLinkResolver() { + @Override public FileStatus[] next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.listStatus(p); @@ -2232,6 +2254,7 @@ public final class FileContext { * Deletes all the paths in deleteOnExit on JVM shutdown. 
*/ static class FileContextFinalizer implements Runnable { + @Override public synchronized void run() { processDeleteOnExit(); } @@ -2244,6 +2267,7 @@ public final class FileContext { protected Path resolve(final Path f) throws FileNotFoundException, UnresolvedLinkException, AccessControlException, IOException { return new FSLinkResolver() { + @Override public Path next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.resolvePath(p); @@ -2259,6 +2283,7 @@ public final class FileContext { */ protected Path resolveIntermediate(final Path f) throws IOException { return new FSLinkResolver() { + @Override public FileStatus next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFileLinkStatus(p); @@ -2281,6 +2306,7 @@ public final class FileContext { final HashSet result = new HashSet(); new FSLinkResolver() { + @Override public Void next(final AbstractFileSystem fs, final Path p) throws IOException, UnresolvedLinkException { result.add(fs); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java index 7947b463e16..3cba9aed386 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java @@ -253,6 +253,7 @@ public class FileStatus implements Writable, Comparable { ////////////////////////////////////////////////// // Writable ////////////////////////////////////////////////// + @Override public void write(DataOutput out) throws IOException { Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN); out.writeLong(getLen()); @@ -270,6 +271,7 @@ public class FileStatus implements Writable, Comparable { } } + @Override public void readFields(DataInput in) throws IOException { String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN); this.path = new Path(strPath); @@ -299,6 +301,7 @@ public class FileStatus implements Writable, Comparable { * @throws ClassCastException if the specified object's is not of * type FileStatus */ + @Override public int compareTo(Object o) { FileStatus other = (FileStatus)o; return this.getPath().compareTo(other.getPath()); @@ -308,6 +311,7 @@ public class FileStatus implements Writable, Comparable { * @param o the object to be compared. * @return true if two file status has the same path name; false if not. */ + @Override public boolean equals(Object o) { if (o == null) { return false; @@ -328,6 +332,7 @@ public class FileStatus implements Writable, Comparable { * * @return a hash code value for the path name. 
*/ + @Override public int hashCode() { return getPath().hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 31b59439a96..ff9f2db1ffd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -147,6 +147,7 @@ public abstract class FileSystem extends Configured implements Closeable { UserGroupInformation ugi = UserGroupInformation.getBestUGI(ticketCachePath, user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws IOException { return get(uri, conf); } @@ -332,6 +333,7 @@ public abstract class FileSystem extends Configured implements Closeable { UserGroupInformation ugi = UserGroupInformation.getBestUGI(ticketCachePath, user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws IOException { return newInstance(uri,conf); } @@ -1389,6 +1391,7 @@ public abstract class FileSystem extends Configured implements Closeable { } final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { return true; } @@ -2056,6 +2059,7 @@ public abstract class FileSystem extends Configured implements Closeable { * No more filesystem operations are needed. Will * release any held locks. */ + @Override public void close() throws IOException { // delete all files that were marked as delete-on-exit. processDeleteOnExit(); @@ -2393,6 +2397,7 @@ public abstract class FileSystem extends Configured implements Closeable { } private class ClientFinalizer implements Runnable { + @Override public synchronized void run() { try { closeAll(true); @@ -2447,7 +2452,7 @@ public abstract class FileSystem extends Configured implements Closeable { this.ugi = UserGroupInformation.getCurrentUser(); } - /** {@inheritDoc} */ + @Override public int hashCode() { return (scheme + authority).hashCode() + ugi.hashCode() + (int)unique; } @@ -2456,7 +2461,7 @@ public abstract class FileSystem extends Configured implements Closeable { return a == b || (a != null && a.equals(b)); } - /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { if (obj == this) { return true; @@ -2471,7 +2476,7 @@ public abstract class FileSystem extends Configured implements Closeable { return false; } - /** {@inheritDoc} */ + @Override public String toString() { return "("+ugi.toString() + ")@" + scheme + "://" + authority; } @@ -2584,6 +2589,7 @@ public abstract class FileSystem extends Configured implements Closeable { return writeOps.get(); } + @Override public String toString() { return bytesRead + " bytes read, " + bytesWritten + " bytes written, " + readOps + " read ops, " + largeReadOps + " large read ops, " diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index ba9bb4eafee..b6a2acae491 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -414,9 +414,11 @@ public class FileUtil { String getResult() throws IOException { return result; } + @Override protected String[] getExecString() { return command; } + @Override protected void 
     protected void parseExecResult(BufferedReader lines) throws IOException {
       String line = lines.readLine();
       if (line == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index c2ecd20b5a4..6e1e099cb0e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -76,6 +76,7 @@ public class FilterFileSystem extends FileSystem {
    *          for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
     // this is less than ideal, but existing filesystems sometimes neglect
@@ -90,6 +91,7 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
+  @Override
   public URI getUri() {
     return fs.getUri();
   }
@@ -104,6 +106,7 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** Make sure that a path specifies a FileSystem. */
+  @Override
   public Path makeQualified(Path path) {
     Path fqPath = fs.makeQualified(path);
     // swap in our scheme if the filtered fs is using a different scheme
@@ -125,10 +128,12 @@ public class FilterFileSystem extends FileSystem {
   ///////////////////////////////////////////////////////////////
 
   /** Check that a Path belongs to this FileSystem. */
+  @Override
   protected void checkPath(Path path) {
     fs.checkPath(path);
   }
 
+  @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
     long len) throws IOException {
       return fs.getFileBlockLocations(file, start, len);
@@ -143,17 +148,17 @@ public class FilterFileSystem extends FileSystem {
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    */
+  @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     return fs.open(f, bufferSize);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     return fs.append(f, bufferSize, progress);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -171,6 +176,7 @@ public class FilterFileSystem extends FileSystem {
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     return fs.setReplication(src, replication);
   }
@@ -179,23 +185,23 @@ public class FilterFileSystem extends FileSystem {
    * Renames Path src to Path dst.  Can take place on local fs
    * or remote DFS.
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     return fs.rename(src, dst);
   }
 
   /** Delete a file */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException {
     return fs.delete(f, recursive);
   }
 
   /** List files in a directory. */
+  @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     return fs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -203,11 +209,13 @@ public class FilterFileSystem extends FileSystem {
   }
 
   /** List files and its block locations in a directory. */
+  @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
   throws IOException {
     return fs.listLocatedStatus(f);
   }
 
+  @Override
   public Path getHomeDirectory() {
     return fs.getHomeDirectory();
   }
@@ -219,6 +227,7 @@ public class FilterFileSystem extends FileSystem {
    *
    * @param newDir
    */
+  @Override
   public void setWorkingDirectory(Path newDir) {
     fs.setWorkingDirectory(newDir);
   }
@@ -228,21 +237,21 @@ public class FilterFileSystem extends FileSystem {
    *
    * @return the directory pathname
    */
+  @Override
   public Path getWorkingDirectory() {
     return fs.getWorkingDirectory();
   }
 
+  @Override
   protected Path getInitialWorkingDirectory() {
     return fs.getInitialWorkingDirectory();
   }
 
-  /** {@inheritDoc} */
   @Override
   public FsStatus getStatus(Path p) throws IOException {
     return fs.getStatus(p);
   }
 
-  /** {@inheritDoc} */
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     return fs.mkdirs(f, permission);
@@ -254,6 +263,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyFromLocalFile(delSrc, src, dst);
@@ -264,6 +274,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite,
                                 Path[] srcs, Path dst)
     throws IOException {
@@ -275,6 +286,7 @@ public class FilterFileSystem extends FileSystem {
    * the given dst name.
    * delSrc indicates if the source should be removed
    */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite,
                                 Path src, Path dst)
     throws IOException {
@@ -286,6 +298,7 @@ public class FilterFileSystem extends FileSystem {
    * Copy it from FS control to the local dst name.
    * delSrc indicates if the src will be removed or not.
    */
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyToLocalFile(delSrc, src, dst);
@@ -297,6 +310,7 @@ public class FilterFileSystem extends FileSystem {
    * file.  If the FS is local, we write directly into the target.  If
    * the FS is remote, we write into the tmp local area.
    */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     return fs.startLocalOutput(fsOutputFile, tmpLocalFile);
@@ -308,12 +322,14 @@ public class FilterFileSystem extends FileSystem {
    * FS will copy the contents of tmpLocalFile to the correct target at
    * fsOutputFile.
    */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     fs.completeLocalOutput(fsOutputFile, tmpLocalFile);
   }
 
   /** Return the total size of all files in the filesystem.*/
+  @Override
   public long getUsed() throws IOException{
     return fs.getUsed();
   }
@@ -357,16 +373,17 @@ public class FilterFileSystem extends FileSystem {
 
   /**
    * Get file status.
   */
+  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    return fs.getFileStatus(f);
  }
 
-  /** {@inheritDoc} */
+  @Override
  public FileChecksum getFileChecksum(Path f) throws IOException {
    return fs.getFileChecksum(f);
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void setVerifyChecksum(boolean verifyChecksum) {
    fs.setVerifyChecksum(verifyChecksum);
  }
@@ -387,21 +404,18 @@ public class FilterFileSystem extends FileSystem {
    fs.close();
  }
 
-  /** {@inheritDoc} */
  @Override
  public void setOwner(Path p, String username, String groupname
      ) throws IOException {
    fs.setOwner(p, username, groupname);
  }
 
-  /** {@inheritDoc} */
  @Override
  public void setTimes(Path p, long mtime, long atime
      ) throws IOException {
    fs.setTimes(p, mtime, atime);
  }
 
-  /** {@inheritDoc} */
  @Override
  public void setPermission(Path p, FsPermission permission
      ) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
index 6cfc11b1faa..9637b6b913a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
@@ -174,9 +174,6 @@ public abstract class FilterFs extends AbstractFileSystem {
    return myFs.listStatus(f);
  }
 
-  /**
-   * {@inheritDoc}
-   */
  @Override
  public RemoteIterator<Path> listCorruptFileBlocks(Path path)
    throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
index 637697b83df..c1b9071bbcd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
@@ -39,6 +39,7 @@ public class FsServerDefaults implements Writable {
  static { // register a ctor
    WritableFactories.setFactory(FsServerDefaults.class, new WritableFactory() {
+      @Override
      public Writable newInstance() { return new FsServerDefaults(); }
@@ -106,6 +107,7 @@ public class FsServerDefaults implements Writable {
  // /////////////////////////////////////////
  // Writable
  // /////////////////////////////////////////
+  @Override
  @InterfaceAudience.Private
  public void write(DataOutput out) throws IOException {
    out.writeLong(blockSize);
@@ -116,6 +118,7 @@ public class FsServerDefaults implements Writable {
    WritableUtils.writeEnum(out, checksumType);
  }
 
+  @Override
  @InterfaceAudience.Private
  public void readFields(DataInput in) throws IOException {
    blockSize = in.readLong();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 4da32789e5c..0db1f9e4317 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -236,6 +236,7 @@ public class FsShell extends Configured implements Tool {
  /**
   * run
   */
+  @Override
  public int run(String argv[]) throws Exception {
    // initialize FsShell
    init();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index 8b9de78fe00..d392c7d765d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -60,12 +60,14 @@ public class FsStatus implements Writable {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(capacity);
     out.writeLong(used);
     out.writeLong(remaining);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     capacity = in.readLong();
     used = in.readLong();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
index 65c608ddecc..90e75b0ccb9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
@@ -53,7 +53,6 @@ class FsUrlConnection extends URLConnection {
     }
   }
 
-  /* @inheritDoc */
   @Override
   public InputStream getInputStream() throws IOException {
     if (is == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
index b9a5f1a2cc0..2a9208ea5bd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
@@ -59,6 +59,7 @@ public class FsUrlStreamHandlerFactory implements
     this.handler = new FsUrlStreamHandler(this.conf);
   }
 
+  @Override
   public java.net.URLStreamHandler createURLStreamHandler(String protocol) {
     if (!protocols.containsKey(protocol)) {
       boolean known = true;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
index 5afa9e911d7..24bff5f9cf0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class GlobFilter implements PathFilter {
   private final static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(Path file) {
       return true;
     }
@@ -75,6 +76,7 @@ public class GlobFilter implements PathFilter {
     return pattern.hasWildcard();
   }
 
+  @Override
   public boolean accept(Path path) {
     return pattern.matches(path.getName()) && userFilter.accept(path);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 8e03fc35a91..9504e1fda64 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -106,6 +106,7 @@ public class HarFileSystem extends FilterFileSystem {
    * har:///archivepath. This assumes the underlying filesystem
    * to be used in case not specified.
   */
+  @Override
  public void initialize(URI name, Configuration conf) throws IOException {
    // decode the name
    URI underLyingURI = decodeHarURI(name, conf);
@@ -247,6 +248,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * return the top level archive.
   */
+  @Override
  public Path getWorkingDirectory() {
    return new Path(uri.toString());
  }
@@ -636,6 +638,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * @return null since no checksum algorithm is implemented.
   */
+  @Override
  public FileChecksum getFileChecksum(Path f) {
    return null;
  }
@@ -668,6 +671,7 @@ public class HarFileSystem extends FilterFileSystem {
    throw new IOException("Har: Create not allowed");
  }
 
+  @Override
  public FSDataOutputStream create(Path f,
      FsPermission permission,
      boolean overwrite,
@@ -735,10 +739,12 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * return the top level archive path.
   */
+  @Override
  public Path getHomeDirectory() {
    return new Path(uri.toString());
  }
 
+  @Override
  public void setWorkingDirectory(Path newDir) {
    //does nothing.
  }
@@ -746,6 +752,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * not implemented.
   */
+  @Override
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    throw new IOException("Har: mkdirs not allowed");
  }
@@ -753,6 +760,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * not implemented.
   */
+  @Override
  public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
      throws IOException {
    throw new IOException("Har: copyfromlocalfile not allowed");
@@ -761,6 +769,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * copies the file in the har filesystem to a local file.
   */
+  @Override
  public void copyToLocalFile(boolean delSrc, Path src, Path dst)
      throws IOException {
    FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
@@ -769,6 +778,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * not implemented.
   */
+  @Override
  public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
      throws IOException {
    throw new IOException("Har: startLocalOutput not allowed");
@@ -777,6 +787,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * not implemented.
   */
+  @Override
  public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
      throws IOException {
    throw new IOException("Har: completeLocalOutput not allowed");
@@ -785,6 +796,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * not implemented.
   */
+  @Override
  public void setOwner(Path p, String username, String groupname)
      throws IOException {
    throw new IOException("Har: setowner not allowed");
@@ -793,6 +805,7 @@ public class HarFileSystem extends FilterFileSystem {
  /**
   * Not implemented.
   */
+  @Override
  public void setPermission(Path p, FsPermission permisssion)
      throws IOException {
    throw new IOException("Har: setPermission not allowed");
@@ -825,6 +838,7 @@ public class HarFileSystem extends FilterFileSystem {
      this.end = start + length;
    }
 
+    @Override
    public synchronized int available() throws IOException {
      long remaining = end - underLyingStream.getPos();
      if (remaining > (long)Integer.MAX_VALUE) {
@@ -833,6 +847,7 @@ public class HarFileSystem extends FilterFileSystem {
      return (int) remaining;
    }
 
+    @Override
    public synchronized void close() throws IOException {
      underLyingStream.close();
      super.close();
@@ -847,15 +862,18 @@ public class HarFileSystem extends FilterFileSystem {
    /**
     * reset is not implemented
     */
+    @Override
    public void reset() throws IOException {
      throw new IOException("reset not implemented.");
    }
 
+    @Override
    public synchronized int read() throws IOException {
      int ret = read(oneBytebuff, 0, 1);
      return (ret <= 0) ? -1: (oneBytebuff[0] & 0xff);
    }
 
+    @Override
    public synchronized int read(byte[] b) throws IOException {
      int ret = read(b, 0, b.length);
      if (ret != -1) {
@@ -867,6 +885,7 @@ public class HarFileSystem extends FilterFileSystem {
    /**
     *
     */
+    @Override
    public synchronized int read(byte[] b, int offset, int len)
      throws IOException {
      int newlen = len;
@@ -882,6 +901,7 @@ public class HarFileSystem extends FilterFileSystem {
      return ret;
    }
 
+    @Override
    public synchronized long skip(long n) throws IOException {
      long tmpN = n;
      if (tmpN > 0) {
@@ -895,10 +915,12 @@ public class HarFileSystem extends FilterFileSystem {
      return (tmpN < 0)? -1 : 0;
    }
 
+    @Override
    public synchronized long getPos() throws IOException {
      return (position - start);
    }
 
+    @Override
    public synchronized void seek(long pos) throws IOException {
      if (pos < 0 || (start + pos > end)) {
        throw new IOException("Failed to seek: EOF");
@@ -907,6 +929,7 @@ public class HarFileSystem extends FilterFileSystem {
      underLyingStream.seek(position);
    }
 
+    @Override
    public boolean seekToNewSource(long targetPos) throws IOException {
      //do not need to implement this
      // hdfs in itself does seektonewsource
@@ -917,6 +940,7 @@ public class HarFileSystem extends FilterFileSystem {
    /**
     * implementing position readable.
     */
+    @Override
    public int read(long pos, byte[] b, int offset, int length)
      throws IOException {
      int nlength = length;
@@ -929,6 +953,7 @@ public class HarFileSystem extends FilterFileSystem {
    /**
     * position readable again.
     */
+    @Override
    public void readFully(long pos, byte[] b, int offset, int length)
      throws IOException {
      if (start + length + pos > end) {
@@ -937,6 +962,7 @@ public class HarFileSystem extends FilterFileSystem {
      underLyingStream.readFully(pos + start, b, offset, length);
    }
 
+    @Override
    public void readFully(long pos, byte[] b) throws IOException {
      readFully(pos, b, 0, b.length);
    }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
index 394c01f7054..7db348c557d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
@@ -91,6 +91,7 @@ public class LocalFileSystem extends ChecksumFileSystem {
   * Moves files to a bad file directory on the same device, so that their
   * storage will not be reused.
   */
+  @Override
  public boolean reportChecksumFailure(Path p, FSDataInputStream in,
                                       long inPos,
                                       FSDataInputStream sums, long sumsPos) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
index b0779ed82fb..01368944a4c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
@@ -94,6 +94,7 @@ public class LocatedFileStatus extends FileStatus {
   * @throws ClassCastException if the specified object's is not of
   *         type FileStatus
   */
+  @Override
  public int compareTo(Object o) {
    return super.compareTo(o);
  }
@@ -102,6 +103,7 @@ public class LocatedFileStatus extends FileStatus {
   * @param o the object to be compared.
   * @return true if two file status has the same path name; false if not.
   */
+  @Override
  public boolean equals(Object o) {
    return super.equals(o);
  }
@@ -112,6 +114,7 @@ public class LocatedFileStatus extends FileStatus {
   *
   * @return a hash code value for the path name.
   */
+  @Override
  public int hashCode() {
    return super.hashCode();
  }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
index 1c697b7f521..5bddb96f0cb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
@@ -57,7 +57,7 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
    this.md5 = md5;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public String getAlgorithmName() {
    return "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC +
        getCrcType().name();
@@ -73,11 +73,11 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
    throw new IOException("Unknown checksum type in " + algorithm);
  }
 
-  
-  /** {@inheritDoc} */
+
+  @Override
  public int getLength() {return LENGTH;}
 
-  
-  /** {@inheritDoc} */
+
+  @Override
  public byte[] getBytes() {
    return WritableUtils.toByteArray(this);
  }
@@ -92,14 +92,14 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
    return new ChecksumOpt(getCrcType(), bytesPerCRC);
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void readFields(DataInput in) throws IOException {
    bytesPerCRC = in.readInt();
    crcPerBlock = in.readLong();
    md5 = MD5Hash.read(in);
  }
 
-  
-  /** {@inheritDoc} */
+
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(bytesPerCRC);
    out.writeLong(crcPerBlock);
@@ -161,8 +161,8 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
          + ", md5=" + md5, e);
    }
  }
 
-  
-  /** {@inheritDoc} */
+
+  @Override
  public String toString() {
    return getAlgorithmName() + ":" + md5;
  }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
index 173e16ea413..8464e512704 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.HadoopIllegalArgumentException;
 
 /**
  * This class contains options related to file system operations.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
index 74c85af48bb..c0ebebfe67e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
@@ -261,6 +261,7 @@ public class Path implements Comparable {
     return new Path(getParent(), getName()+suffix);
   }
 
+  @Override
   public String toString() {
     // we can't use uri.toString(), which escapes everything, because we want
     // illegal characters unescaped in the string, for glob processing, etc.
@@ -289,6 +290,7 @@ public class Path implements Comparable {
     return buffer.toString();
   }
 
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof Path)) {
       return false;
@@ -297,10 +299,12 @@ public class Path implements Comparable {
     return this.uri.equals(that.uri);
   }
 
+  @Override
   public int hashCode() {
     return uri.hashCode();
   }
 
+  @Override
   public int compareTo(Object o) {
     Path that = (Path)o;
     return this.uri.compareTo(that.uri);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index 61b09149825..9727f14ce24 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -72,8 +72,10 @@ public class RawLocalFileSystem extends FileSystem {
     return new File(path.toUri().getPath());
   }
 
+  @Override
   public URI getUri() { return NAME; }
 
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
@@ -84,6 +86,7 @@ public class RawLocalFileSystem extends FileSystem {
       super(f);
     }
 
+    @Override
     public int read() throws IOException {
       int result = super.read();
       if (result != -1) {
@@ -92,6 +95,7 @@ public class RawLocalFileSystem extends FileSystem {
       return result;
     }
 
+    @Override
     public int read(byte[] data) throws IOException {
       int result = super.read(data);
       if (result != -1) {
@@ -100,6 +104,7 @@ public class RawLocalFileSystem extends FileSystem {
       return result;
     }
 
+    @Override
     public int read(byte[] data, int offset, int length) throws IOException {
       int result = super.read(data, offset, length);
       if (result != -1) {
@@ -120,15 +125,18 @@ public class RawLocalFileSystem extends FileSystem {
       this.fis = new TrackingFileInputStream(pathToFile(f));
     }
 
+    @Override
     public void seek(long pos) throws IOException {
       fis.getChannel().position(pos);
       this.position = pos;
     }
 
+    @Override
     public long getPos() throws IOException {
       return this.position;
     }
 
+    @Override
     public boolean seekToNewSource(long targetPos) throws IOException {
       return false;
     }
@@ -136,11 +144,14 @@ public class RawLocalFileSystem extends FileSystem {
     /*
      * Just forward to the fis
      */
+    @Override
     public int available() throws IOException { return fis.available(); }
+    @Override
     public void close() throws IOException { fis.close(); }
     @Override
     public boolean markSupported() { return false; }
 
+    @Override
     public int read() throws IOException {
       try {
         int value = fis.read();
@@ -153,6 +164,7 @@ public class RawLocalFileSystem extends FileSystem {
       }
     }
 
+    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      try {
        int value = fis.read(b, off, len);
@@ -165,6 +177,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
 
+    @Override
    public int read(long position, byte[] b, int off, int len)
      throws IOException {
      ByteBuffer bb = ByteBuffer.wrap(b, off, len);
@@ -175,6 +188,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
 
+    @Override
    public long skip(long n) throws IOException {
      long value = fis.skip(n);
      if (value > 0) {
@@ -189,6 +203,7 @@ public class RawLocalFileSystem extends FileSystem {
    }
  }
 
+  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    if (!exists(f)) {
      throw new FileNotFoundException(f.toString());
@@ -210,8 +225,11 @@ public class RawLocalFileSystem extends FileSystem {
    /*
     * Just forward to the fos
     */
+    @Override
    public void close() throws IOException { fos.close(); }
+    @Override
    public void flush() throws IOException { fos.flush(); }
+    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      try {
        fos.write(b, off, len);
@@ -220,6 +238,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
 
+    @Override
    public void write(int b) throws IOException {
      try {
        fos.write(b);
@@ -229,7 +248,7 @@ public class RawLocalFileSystem extends FileSystem {
      }
    }
 
-  /** {@inheritDoc} */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    if (!exists(f)) {
@@ -242,7 +261,6 @@ public class RawLocalFileSystem extends FileSystem {
        new LocalFSFileOutputStream(f, true), bufferSize), statistics);
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
      short replication, long blockSize, Progressable progress)
@@ -264,7 +282,6 @@ public class RawLocalFileSystem extends FileSystem {
        new LocalFSFileOutputStream(f, false), bufferSize), statistics);
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream create(Path f, FsPermission permission,
      boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -276,7 +293,6 @@ public class RawLocalFileSystem extends FileSystem {
    return out;
  }
 
-  /** {@inheritDoc} */
  @Override
  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
      boolean overwrite,
@@ -288,6 +304,7 @@ public class RawLocalFileSystem extends FileSystem {
    return out;
  }
 
+  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    if (pathToFile(src).renameTo(pathToFile(dst))) {
      return true;
@@ -302,6 +319,7 @@ public class RawLocalFileSystem extends FileSystem {
   * @return true if the file or directory and all its contents were deleted
   * @throws IOException if p is non-empty and recursive is false
   */
+  @Override
  public boolean delete(Path p, boolean recursive) throws IOException {
    File f = pathToFile(p);
    if (f.isFile()) {
@@ -313,6 +331,7 @@ public class RawLocalFileSystem extends FileSystem {
    return FileUtil.fullyDelete(f);
  }
 
+  @Override
  public FileStatus[] listStatus(Path f) throws IOException {
    File localf = pathToFile(f);
    FileStatus[] results;
@@ -350,6 +369,7 @@ public class RawLocalFileSystem extends FileSystem {
   * Creates the specified directory hierarchy. Does not
   * treat existence as an error.
   */
+  @Override
  public boolean mkdirs(Path f) throws IOException {
    if(f == null) {
      throw new IllegalArgumentException("mkdirs path arg is null");
@@ -367,7 +387,6 @@ public class RawLocalFileSystem extends FileSystem {
      (p2f.mkdir() || p2f.isDirectory());
  }
 
-  /** {@inheritDoc} */
  @Override
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    boolean b = mkdirs(f);
@@ -412,7 +431,6 @@ public class RawLocalFileSystem extends FileSystem {
    return this.makeQualified(new Path(System.getProperty("user.dir")));
  }
 
-  /** {@inheritDoc} */
  @Override
  public FsStatus getStatus(Path p) throws IOException {
    File partition = pathToFile(p == null ? new Path("/") : p);
@@ -424,29 +442,35 @@ public class RawLocalFileSystem extends FileSystem {
  }
 
  // In the case of the local filesystem, we can just rename the file.
+  @Override
  public void moveFromLocalFile(Path src, Path dst) throws IOException {
    rename(src, dst);
  }
 
  // We can write output directly to the final location
+  @Override
  public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
    throws IOException {
    return fsOutputFile;
  }
 
  // It's in the right place - nothing to do.
+  @Override
  public void completeLocalOutput(Path fsWorkingFile, Path tmpLocalFile)
    throws IOException {
  }
 
+  @Override
  public void close() throws IOException {
    super.close();
  }
 
+  @Override
  public String toString() {
    return "LocalFS";
  }
 
+  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    File path = pathToFile(f);
    if (path.exists()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
index 07870df1a62..1820c6619e2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
@@ -263,6 +263,7 @@ public class TrashPolicyDefault extends TrashPolicy {
      }
    }
 
+    @Override
    public void run() {
      if (emptierInterval == 0)
        return;                                   // trash disabled
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
index 1c19ce27fb8..99ca4fbb806 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
@@ -262,6 +262,7 @@ public class FTPFileSystem extends FileSystem {
  }
 
  /** This optional operation is not yet supported. */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    throw new IOException("Not supported");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
index d3ac019a944..beea508d5d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
@@ -51,19 +51,23 @@ public class FTPInputStream extends FSInputStream {
    this.closed = false;
  }
 
+  @Override
  public long getPos() throws IOException {
    return pos;
  }
 
  // We don't support seek.
+  @Override
  public void seek(long pos) throws IOException {
    throw new IOException("Seek not supported");
  }
 
+  @Override
  public boolean seekToNewSource(long targetPos) throws IOException {
    throw new IOException("Seek not supported");
  }
 
+  @Override
  public synchronized int read() throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -79,6 +83,7 @@ public class FTPInputStream extends FSInputStream {
    return byteRead;
  }
 
+  @Override
  public synchronized int read(byte buf[], int off, int len) throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -95,6 +100,7 @@ public class FTPInputStream extends FSInputStream {
    return result;
  }
 
+  @Override
  public synchronized void close() throws IOException {
    if (closed) {
      throw new IOException("Stream closed");
@@ -116,14 +122,17 @@ public class FTPInputStream extends FSInputStream {
 
  // Not supported.
 
+  @Override
  public boolean markSupported() {
    return false;
  }
 
+  @Override
  public void mark(int readLimit) {
    // Do nothing
  }
 
+  @Override
  public void reset() throws IOException {
    throw new IOException("Mark not supported");
  }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java
index 88b28ed434f..0d77a78c87f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java
@@ -50,22 +50,27 @@ class KFSImpl implements IFSImpl {
        statistics = stats;
    }
 
+    @Override
    public boolean exists(String path) throws IOException {
        return kfsAccess.kfs_exists(path);
    }
 
+    @Override
    public boolean isDirectory(String path) throws IOException {
        return kfsAccess.kfs_isDirectory(path);
    }
 
+    @Override
    public boolean isFile(String path) throws IOException {
        return kfsAccess.kfs_isFile(path);
    }
 
+    @Override
    public String[] readdir(String path) throws IOException {
        return kfsAccess.kfs_readdir(path);
    }
 
+    @Override
    public FileStatus[] readdirplus(Path path) throws IOException {
        String srep = path.toUri().getPath();
        KfsFileAttr[] fattr = kfsAccess.kfs_readdirplus(srep);
@@ -100,52 +105,64 @@ class KFSImpl implements IFSImpl {
    }
 
+    @Override
    public int mkdirs(String path) throws IOException {
        return kfsAccess.kfs_mkdirs(path);
    }
 
+    @Override
    public int rename(String source, String dest) throws IOException {
        return kfsAccess.kfs_rename(source, dest);
    }
 
+    @Override
    public int rmdir(String path) throws IOException {
        return kfsAccess.kfs_rmdir(path);
    }
 
+    @Override
    public int remove(String path) throws IOException {
        return kfsAccess.kfs_remove(path);
    }
 
+    @Override
    public long filesize(String path) throws IOException {
        return kfsAccess.kfs_filesize(path);
    }
 
+    @Override
    public short getReplication(String path) throws IOException {
        return kfsAccess.kfs_getReplication(path);
    }
 
+    @Override
    public short setReplication(String path, short replication) throws IOException {
        return kfsAccess.kfs_setReplication(path, replication);
    }
 
+    @Override
    public String[][] getDataLocation(String path, long start, long len) throws IOException {
        return kfsAccess.kfs_getDataLocation(path, start, len);
    }
 
+    @Override
    public long getModificationTime(String path) throws IOException {
        return kfsAccess.kfs_getModificationTime(path);
    }
 
+    @Override
    public FSDataInputStream open(String path, int bufferSize) throws IOException {
        return new FSDataInputStream(new KFSInputStream(kfsAccess, path,
                                                        statistics));
    }
 
+    @Override
    public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException {
        return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, replication, false, progress),
                                      statistics);
    }
 
+    @Override
    public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException {
        // when opening for append, # of replicas is ignored
        return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, (short) 1, true, progress),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java
index 04c937b8486..492230f064d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java
@@ -53,6 +53,7 @@ class KFSInputStream extends FSInputStream {
        this.fsize = 0;
    }
 
+    @Override
    public long getPos() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -60,6 +61,7 @@ class KFSInputStream extends FSInputStream {
        return kfsChannel.tell();
    }
 
+    @Override
    public synchronized int available() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -67,6 +69,7 @@ class KFSInputStream extends FSInputStream {
        return (int) (this.fsize - getPos());
    }
 
+    @Override
    public synchronized void seek(long targetPos) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -74,10 +77,12 @@ class KFSInputStream extends FSInputStream {
        kfsChannel.seek(targetPos);
    }
 
+    @Override
    public synchronized boolean seekToNewSource(long targetPos) throws IOException {
        return false;
    }
 
+    @Override
    public synchronized int read() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -93,6 +98,7 @@ class KFSInputStream extends FSInputStream {
        return -1;
    }
 
+    @Override
    public synchronized int read(byte b[], int off, int len) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -109,6 +115,7 @@ class KFSInputStream extends FSInputStream {
        return res;
    }
 
+    @Override
    public synchronized void close() throws IOException {
        if (kfsChannel == null) {
            return;
@@ -118,14 +125,17 @@ class KFSInputStream extends FSInputStream {
        kfsChannel = null;
    }
 
+    @Override
    public boolean markSupported() {
        return false;
    }
 
+    @Override
    public void mark(int readLimit) {
        // Do nothing
    }
 
+    @Override
    public void reset() throws IOException {
        throw new IOException("Mark not supported");
    }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java
index 59cea357e6c..a50f750733b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java
@@ -20,15 +20,10 @@
 package org.apache.hadoop.fs.kfs;
 
 import java.io.*;
-import java.net.*;
-import java.util.*;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.util.Progressable;
 
 import org.kosmix.kosmosfs.access.KfsAccess;
@@ -60,6 +55,7 @@ class KFSOutputStream extends OutputStream {
        return kfsChannel.tell();
    }
 
+    @Override
    public void write(int v) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -70,6 +66,7 @@ class KFSOutputStream extends OutputStream {
        write(b, 0, 1);
    }
 
+    @Override
    public void write(byte b[], int off, int len) throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -80,6 +77,7 @@ class KFSOutputStream extends OutputStream {
        kfsChannel.write(ByteBuffer.wrap(b, off, len));
    }
 
+    @Override
    public void flush() throws IOException {
        if (kfsChannel == null) {
            throw new IOException("File closed");
@@ -89,6 +87,7 @@ class KFSOutputStream extends OutputStream {
        kfsChannel.sync();
    }
 
+    @Override
    public synchronized void close() throws IOException {
        if (kfsChannel == null) {
            return;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index af3d5148d59..972a410b53d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -40,6 +40,7 @@ public class FsPermission implements Writable {
  private static final Log LOG = LogFactory.getLog(FsPermission.class);
 
  static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
    public Writable newInstance() { return new FsPermission(); }
  };
  static { // register a ctor
@@ -124,12 +125,12 @@ public class FsPermission implements Writable {
    set(v[(n >>> 6) & 7], v[(n >>> 3) & 7], v[n & 7], (((n >>> 9) & 1) == 1) );
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void write(DataOutput out) throws IOException {
    out.writeShort(toShort());
  }
 
-  /** {@inheritDoc} */
+  @Override
  public void readFields(DataInput in) throws IOException {
    fromShort(in.readShort());
  }
@@ -155,7 +156,7 @@ public class FsPermission implements Writable {
    return (short)s;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public boolean equals(Object obj) {
    if (obj instanceof FsPermission) {
      FsPermission that = (FsPermission)obj;
@@ -167,10 +168,10 @@ public class FsPermission implements Writable {
    return false;
  }
 
-  /** {@inheritDoc} */
+  @Override
  public int hashCode() {return toShort();}
 
-  /** {@inheritDoc} */
+  @Override
  public String toString() {
    String str = useraction.SYMBOL + groupaction.SYMBOL + otheraction.SYMBOL;
    if(stickyBit) {
@@ -300,9 +301,11 @@ public class FsPermission implements Writable {
    public ImmutableFsPermission(short permission) {
      super(permission);
    }
+    @Override
    public FsPermission applyUMask(FsPermission umask) {
      throw new UnsupportedOperationException();
    }
+    @Override
    public void readFields(DataInput in) throws IOException {
      throw new UnsupportedOperationException();
    }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
index f47226f1e2a..bc9e392a872 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
@@ -32,6 +32,7 @@ import java.io.IOException;
 @InterfaceStability.Unstable
 public class PermissionStatus implements Writable {
   static final WritableFactory FACTORY = new WritableFactory() {
+    @Override
     public Writable newInstance() { return new PermissionStatus(); }
   };
   static { // register a ctor
@@ -42,9 +43,11 @@ public class PermissionStatus implements Writable {
   public static PermissionStatus createImmutable(
       String user, String group, FsPermission permission) {
     return new PermissionStatus(user, group, permission) {
+      @Override
       public PermissionStatus applyUMask(FsPermission umask) {
         throw new UnsupportedOperationException();
       }
+      @Override
       public void readFields(DataInput in) throws IOException {
         throw new UnsupportedOperationException();
       }
@@ -82,14 +85,14 @@ public class PermissionStatus implements Writable {
     return this;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
     username = Text.readString(in, Text.DEFAULT_MAX_LEN);
     groupname = Text.readString(in, Text.DEFAULT_MAX_LEN);
     permission = FsPermission.read(in);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     write(out, username, groupname, permission);
   }
@@ -115,7 +118,7 @@ public class PermissionStatus implements Writable {
     permission.write(out);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return username + ":" + groupname + ":" + permission;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 6667d621892..4adc306633a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -83,6 +83,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   private static final Log LOG =
     LogFactory.getLog(Jets3tFileSystemStore.class.getName());
 
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
 
     this.conf = conf;
@@ -108,6 +109,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     );
   }
 
+  @Override
   public String getVersion() throws IOException {
     return FILE_SYSTEM_VERSION_VALUE;
   }
@@ -123,14 +125,17 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void deleteINode(Path path) throws IOException {
     delete(pathToKey(path));
   }
 
+  @Override
   public void deleteBlock(Block block) throws IOException {
     delete(blockToKey(block));
   }
 
+  @Override
   public boolean inodeExists(Path path) throws IOException {
     InputStream in = get(pathToKey(path), true);
     if (in == null) {
@@ -140,6 +145,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return true;
   }
 
+  @Override
   public boolean blockExists(long blockId) throws IOException {
     InputStream in = get(blockToKey(blockId), false);
     if (in == null) {
@@ -203,10 +209,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public INode retrieveINode(Path path) throws IOException {
     return INode.deserialize(get(pathToKey(path), true));
   }
 
+  @Override
   public File retrieveBlock(Block block, long byteRangeStart)
     throws IOException {
     File fileBlock = null;
@@ -249,6 +257,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return result;
   }
 
+  @Override
   public Set<Path> listSubPaths(Path path) throws IOException {
     try {
       String prefix = pathToKey(path);
@@ -270,6 +279,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public Set<Path> listDeepSubPaths(Path path) throws IOException {
     try {
       String prefix = pathToKey(path);
@@ -311,10 +321,12 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void storeINode(Path path, INode inode) throws IOException {
     put(pathToKey(path), inode.serialize(), inode.getSerializedLength(), true);
   }
 
+  @Override
   public void storeBlock(Block block, File file) throws IOException {
     BufferedInputStream in = null;
     try {
@@ -354,6 +366,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     return blockToKey(block.getId());
   }
 
+  @Override
   public void purge() throws IOException {
     try {
       S3Object[] objects = s3Service.listObjects(bucket);
@@ -368,6 +381,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     }
   }
 
+  @Override
   public void dump() throws IOException {
     StringBuilder sb = new StringBuilder("S3 Filesystem, ");
     sb.append(bucket.getName()).append("\n");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
index f82755781e7..416bfb17c46 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
@@ -61,6 +61,7 @@ public class MigrationTool extends Configured implements Tool {
     System.exit(res);
   }
 
+  @Override
   public int run(String[] args) throws Exception {
 
     if (args.length == 0) {
@@ -195,6 +196,7 @@ public class MigrationTool extends Configured implements Tool {
 
   class UnversionedStore implements Store {
 
+    @Override
     public Set<Path> listAllPaths() throws IOException {
       try {
         String prefix = urlEncode(Path.SEPARATOR);
@@ -212,6 +214,7 @@ public class MigrationTool extends Configured implements Tool {
       }
     }
 
+    @Override
     public void deleteINode(Path path) throws IOException {
       delete(pathToKey(path));
     }
@@ -227,6 +230,7 @@ public class MigrationTool extends Configured implements Tool {
       }
     }
 
+    @Override
     public INode retrieveINode(Path path) throws IOException {
       return INode.deserialize(get(pathToKey(path)));
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
index 5a5d628adb0..81ef31446e5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
@@ -206,6 +206,7 @@ public class S3FileSystem extends FileSystem {
   }
 
   /** This optional operation is not yet supported. */
+  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress) throws IOException {
    throw new IOException("Not supported");
@@ -298,6 +299,7 @@ public class S3FileSystem extends FileSystem {
    return true;
  }
 
+  @Override
  public boolean delete(Path path, boolean recursive) throws IOException {
   Path absolutePath = makeAbsolute(path);
   INode inode = store.retrieveINode(absolutePath);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
index c2293ba6828..400419c110b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
@@ -49,6 +49,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
  private S3Service s3Service;
  private S3Bucket bucket;
 
+  @Override
  public void initialize(URI uri, Configuration conf) throws IOException {
    S3Credentials s3Credentials = new S3Credentials();
    s3Credentials.initialize(uri, conf);
@@ -63,6 +64,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    bucket = new S3Bucket(uri.getHost());
  }
 
+  @Override
  public void storeFile(String key, File file, byte[] md5Hash)
    throws IOException {
 
@@ -90,6 +92,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void storeEmptyFile(String key) throws IOException {
    try {
      S3Object object = new S3Object(key);
@@ -102,6 +105,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public FileMetadata retrieveMetadata(String key) throws IOException {
    try {
      S3Object object = s3Service.getObjectDetails(bucket, key);
@@ -117,6 +121,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public InputStream retrieve(String key) throws IOException {
    try {
      S3Object object = s3Service.getObject(bucket, key);
@@ -127,6 +132,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public InputStream retrieve(String key, long byteRangeStart)
    throws IOException {
    try {
@@ -139,11 +145,13 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public PartialListing list(String prefix, int maxListingLength)
    throws IOException {
    return list(prefix, maxListingLength, null, false);
  }
 
+  @Override
  public PartialListing list(String prefix, int maxListingLength, String priorLastKey,
      boolean recurse) throws IOException {
 
@@ -175,6 +183,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void delete(String key) throws IOException {
    try {
      s3Service.deleteObject(bucket, key);
@@ -183,6 +192,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void copy(String srcKey, String dstKey) throws IOException {
    try {
      s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
@@ -192,6 +202,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void purge(String prefix) throws IOException {
    try {
      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
@@ -203,6 +214,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
    }
  }
 
+  @Override
  public void dump() throws IOException {
StringBuilder("S3 Native Filesystem, "); sb.append(bucket.getName()).append("\n"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java index eea429a97e5..e1aeea94acf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java @@ -150,6 +150,7 @@ public class CommandFormat { actual = got; } + @Override public String getMessage() { return "expected " + expected + " but got " + actual; } @@ -165,6 +166,7 @@ public class CommandFormat { super(expected, actual); } + @Override public String getMessage() { return "Too many arguments: " + super.getMessage(); } @@ -180,6 +182,7 @@ public class CommandFormat { super(expected, actual); } + @Override public String getMessage() { return "Not enough arguments: " + super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java index 71bfc9510dd..bc1d8af9510 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java @@ -114,6 +114,7 @@ class Delete { static class Rmr extends Rm { public static final String NAME = "rmr"; + @Override protected void processOptions(LinkedList args) throws IOException { args.addFirst("-r"); super.processOptions(args); @@ -136,6 +137,7 @@ class Delete { private boolean ignoreNonEmpty = false; + @Override protected void processOptions(LinkedList args) throws IOException { CommandFormat cf = new CommandFormat( 1, Integer.MAX_VALUE, "-ignore-fail-on-non-empty"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java index 1dfd2f48a2c..d80b2d6686a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java @@ -162,6 +162,7 @@ class Display extends FsCommand { outbuf = new DataOutputBuffer(); } + @Override public int read() throws IOException { int ret; if (null == inbuf || -1 == (ret = inbuf.read())) { @@ -181,6 +182,7 @@ class Display extends FsCommand { return ret; } + @Override public void close() throws IOException { r.close(); super.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java index 3f397327de3..2541be393b2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java @@ -73,6 +73,7 @@ abstract public class FsCommand extends Command { // abstract method that normally is invoked by runall() which is // overridden below + @Override protected void run(Path path) throws IOException { throw new RuntimeException("not supposed to get here"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index b53d2820de4..04574cf6730 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -380,6 +380,7 @@ public class PathData implements Comparable<PathData> {
    * as given on the commandline, or the full path
    * @return String of the path
    */
+  @Override
   public String toString() {
     String scheme = uri.getScheme();
     // No interpretation of symbols. Just decode % escaped chars.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
index 85426fa4fff..95d0a2d456a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
@@ -102,6 +102,7 @@ class ChRootedFileSystem extends FilterFileSystem {
    * for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(final URI name, final Configuration conf)
       throws IOException {
     super.initialize(name, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java
index f92108cfe75..143ce68ebbd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java
@@ -20,10 +20,6 @@ package org.apache.hadoop.fs.viewfs;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-
 import org.apache.hadoop.fs.Path;
 
 /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 1c0c8dac4df..6031daf1186 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -164,6 +164,7 @@ public class ViewFileSystem extends FileSystem {
    * this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(final URI theUri, final Configuration conf)
       throws IOException {
     super.initialize(theUri, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java
index 871e3d8a637..e0f62e453b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java
@@ -42,7 +42,8 @@ class ViewFsFileStatus extends FileStatus {
      return super.equals(o);
    }
 
-   public int hashCode() {
+   @Override
+   public int hashCode() {
      return super.hashCode();
    }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index a4ed255debe..52875810735 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -892,6 +892,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
       final List<ACL> acl, final CreateMode mode)
       throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<String>() {
+      @Override
       public String run() throws KeeperException, InterruptedException {
         return zkClient.create(path, data, acl, mode);
       }
@@ -901,6 +902,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private byte[] getDataWithRetries(final String path, final boolean watch,
       final Stat stat) throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<byte[]>() {
+      @Override
       public byte[] run() throws KeeperException, InterruptedException {
         return zkClient.getData(path, watch, stat);
       }
@@ -910,6 +912,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private Stat setDataWithRetries(final String path, final byte[] data,
       final int version) throws InterruptedException, KeeperException {
     return zkDoWithRetries(new ZKAction<Stat>() {
+      @Override
       public Stat run() throws KeeperException, InterruptedException {
         return zkClient.setData(path, data, version);
       }
@@ -919,6 +922,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   private void deleteWithRetries(final String path, final int version)
       throws KeeperException, InterruptedException {
     zkDoWithRetries(new ZKAction<Void>() {
+      @Override
       public Void run() throws KeeperException, InterruptedException {
         zkClient.delete(path, version);
         return null;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
index 1861c4e9689..db7f6a91f25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
@@ -56,6 +56,7 @@ public interface HAServiceProtocol {
       this.name = name;
     }
 
+    @Override
     public String toString() {
       return name;
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
index 06fb648f428..4898b38726f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java
@@ -184,6 +184,7 @@ public class NodeFencer {
       this.arg = arg;
     }
 
+    @Override
     public String toString() {
       return method.getClass().getCanonicalName() + "(" + arg + ")";
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
index 537fba942de..343693e95c1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
@@ -274,6 +274,7 @@ public class SshFenceByTcpPort extends Configured
     static final Log LOG = LogFactory.getLog(
         SshFenceByTcpPort.class.getName() + ".jsch");
 
+    @Override
     public boolean isEnabled(int level) {
       switch (level) {
       case com.jcraft.jsch.Logger.DEBUG:
@@ -291,6 +292,7 @@ public class SshFenceByTcpPort extends Configured
       }
     }
 
+    @Override
     public void log(int level, String message) {
       switch (level) {
       case com.jcraft.jsch.Logger.DEBUG:
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
index b19d5e69afd..3e156d286fe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -473,7 +473,7 @@ public class HttpServer implements FilterContainer {
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void addFilter(String name, String classname,
       Map<String, String> parameters) {
 
@@ -493,7 +493,7 @@ public class HttpServer implements FilterContainer {
     filterNames.add(name);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void addGlobalFilter(String name, String classname,
       Map<String, String> parameters) {
     final String[] ALL_URLS = { "/*" };
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index bb2f163fe47..6bd9efc689e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -164,16 +164,18 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
   }
 
   /** @return the conf */
+  @Override
   public Configuration getConf() {
     return conf.get();
   }
 
   /** @param conf the conf to set */
+  @Override
   public void setConf(Configuration conf) {
     this.conf.set(conf);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
 
     // First write out the size of the class table and any classes that are
@@ -187,7 +189,7 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
 
     // Get the number of "unknown" classes
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java
index 875d6efdc2f..122aa5ca1e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java
@@ -88,6 +88,7 @@ public class ArrayWritable implements Writable {
 
   public Writable[] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     values = new Writable[in.readInt()];          // construct values
     for (int i = 0; i < values.length; i++) {
@@ -97,6 +98,7 @@ public class ArrayWritable implements Writable {
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {
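Note (illustrative aside, not part of the patch): the org.apache.hadoop.io hunks that follow annotate write(DataOutput) and readFields(DataInput), the two methods every Writable implementation supplies. A minimal custom Writable written under the same convention; the class name PairWritable is hypothetical:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    // Hypothetical example, not from the Hadoop tree.
    public class PairWritable implements Writable {
      private int left;
      private int right;

      @Override  // implements Writable.write
      public void write(DataOutput out) throws IOException {
        out.writeInt(left);
        out.writeInt(right);
      }

      @Override  // implements Writable.readFields
      public void readFields(DataInput in) throws IOException {
        left = in.readInt();
        right = in.readInt();
      }
    }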
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
index 71279b4f6d7..0079079a792 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
@@ -57,12 +57,14 @@ public class BooleanWritable implements WritableComparable {
   /**
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readBoolean();
   }
 
   /**
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeBoolean(value);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
index ff926c11c14..ffcdea2c9a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
@@ -39,10 +39,12 @@ public class ByteWritable implements WritableComparable {
   /** Return the value of this ByteWritable. */
   public byte get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readByte();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeByte(value);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
index 012a3bc9d7e..7e42a36cb76 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
@@ -81,6 +81,7 @@ public class BytesWritable extends BinaryComparable
    * if you need the returned array to be precisely the length of the data.
    * @return The data is only valid between 0 and getLength() - 1.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
@@ -97,6 +98,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Get the current size of the buffer.
    */
+  @Override
   public int getLength() {
     return size;
   }
@@ -171,6 +173,7 @@ public class BytesWritable extends BinaryComparable
   }
 
   // inherit javadoc
+  @Override
   public void readFields(DataInput in) throws IOException {
     setSize(0); // clear the old data
     setSize(in.readInt());
@@ -178,6 +181,7 @@ public class BytesWritable extends BinaryComparable
   }
 
   // inherit javadoc
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(size);
     out.write(bytes, 0, size);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
index ad3164b2d2c..6550e1f2fde 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
@@ -45,6 +45,7 @@ public abstract class CompressedWritable implements Writable {
 
   public CompressedWritable() {}
 
+  @Override
   public final void readFields(DataInput in) throws IOException {
     compressed = new byte[in.readInt()];
     in.readFully(compressed, 0, compressed.length);
@@ -70,6 +71,7 @@ public abstract class CompressedWritable implements Writable {
   protected abstract void readFieldsCompressed(DataInput in)
     throws IOException;
 
+  @Override
   public final void write(DataOutput out) throws IOException {
     if (compressed == null) {
       ByteArrayOutputStream deflated = new ByteArrayOutputStream();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java
index 469d3ff863c..2cd59d75dc2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.io;
 import java.io.DataInputStream;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
 
 public class DataInputByteBuffer extends DataInputStream {
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
index 6cd1f497220..2b8e259464b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -72,6 +72,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     }
   }
 
+  @Override
   public T fromString(String str) throws IOException {
     try {
       byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
@@ -83,6 +84,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     }
   }
 
+  @Override
   public String toString(T obj) throws IOException {
     outBuf.reset();
     serializer.serialize(obj);
@@ -91,6 +93,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     return new String(Base64.encodeBase64(buf));
   }
 
+  @Override
   public void close() throws IOException {
     inBuf.close();
     outBuf.close();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
index a984cd4ef5f..5cc326fe3c2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
@@ -42,10 +42,12 @@ public class DoubleWritable implements WritableComparable {
     set(value);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readDouble();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeDouble(value);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
index c1ff1ca3bfe..dc430cc29c3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
@@ -23,7 +23,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
-import java.util.Collection;
 import java.util.AbstractCollection;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -46,8 +45,11 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
   EnumSetWritable() {
   }
 
+  @Override
   public Iterator<E> iterator() { return value.iterator(); }
+  @Override
   public int size() { return value.size(); }
+  @Override
   public boolean add(E e) {
     if (value == null) {
       value = EnumSet.of(e);
@@ -109,7 +111,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     return value;
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     int length = in.readInt();
@@ -127,7 +129,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     if (this.value == null) {
       out.writeInt(-1);
@@ -152,6 +154,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    * Returns true if o is an EnumSetWritable with the same value,
    * or both are null.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       throw new IllegalArgumentException("null argument passed in equal().");
@@ -180,27 +183,25 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     return elementType;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     if (value == null)
       return 0;
     return (int) value.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     if (value == null)
       return "(null)";
     return value.toString();
   }
 
-  /** {@inheritDoc} */
   @Override
   public Configuration getConf() {
     return this.conf;
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
index 4ade2c4d62d..21e4cc4f5b1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
@@ -39,10 +39,12 @@ public class FloatWritable implements WritableComparable {
   /** Return the value of this FloatWritable. */
   public float get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readFloat();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeFloat(value);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
index 8268a5a915f..7cfeed7f931 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
@@ -114,11 +114,13 @@ public abstract class GenericWritable implements Writable, Configurable {
     return instance;
   }
 
+  @Override
   public String toString() {
     return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() +
         ",value=" + instance.toString()) : "(null)") + "]";
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     type = in.readByte();
     Class clazz = getTypes()[type & 0xff];
@@ -131,6 +133,7 @@ public abstract class GenericWritable implements Writable, Configurable {
     instance.readFields(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     if (type == NOT_SET || instance == null)
       throw new IOException("The GenericWritable has NOT been set correctly. type="
@@ -145,10 +148,12 @@ public abstract class GenericWritable implements Writable, Configurable {
    */
   abstract protected Class[] getTypes();
 
+  @Override
   public Configuration getConf() {
     return conf;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
index 819f075812b..a3315a869e4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ -272,9 +272,11 @@ public class IOUtils {
    * The /dev/null of OutputStreams.
    */
   public static class NullOutputStream extends OutputStream {
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
     }
 
+    @Override
     public void write(int b) throws IOException {
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
index 6a44d81db63..f656d028cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
@@ -42,10 +42,12 @@ public class IntWritable implements WritableComparable {
   /** Return the value of this IntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readInt();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(value);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
index b9d64d904dd..6dec4aa618a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
@@ -42,15 +42,18 @@ public class LongWritable implements WritableComparable<LongWritable> {
   /** Return the value of this LongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readLong();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(value);
   }
 
   /** Returns true iff <code>o</code> is a LongWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof LongWritable))
       return false;
@@ -58,17 +61,20 @@ public class LongWritable implements WritableComparable<LongWritable> {
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return (int)value;
   }
 
   /** Compares two LongWritables. */
+  @Override
   public int compareTo(LongWritable o) {
     long thisValue = this.value;
     long thatValue = o.value;
     return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
@@ -79,6 +85,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
       super(LongWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       long thisValue = readLong(b1, s1);
@@ -94,6 +101,7 @@ public class LongWritable implements WritableComparable<LongWritable> {
     public int compare(WritableComparable a, WritableComparable b) {
       return -super.compare(a, b);
     }
+    @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
       return -super.compare(b1, s1, l1, b2, s2, l2);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
index 54be96bfa2a..87c89824a6a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
@@ -36,6 +36,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   public static final int MD5_LEN = 16;
 
   private static ThreadLocal<MessageDigest> DIGESTER_FACTORY = new ThreadLocal<MessageDigest>() {
+    @Override
     protected MessageDigest initialValue() {
       try {
         return MessageDigest.getInstance("MD5");
@@ -65,6 +66,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   }
 
   // javadoc from Writable
+  @Override
   public void readFields(DataInput in) throws IOException {
     in.readFully(digest);
   }
@@ -77,6 +79,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   }
 
   // javadoc from Writable
+  @Override
   public void write(DataOutput out) throws IOException {
     out.write(digest);
   }
@@ -155,6 +158,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   /** Returns true iff <code>o</code> is an MD5Hash whose digest contains the
    * same values.  */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof MD5Hash))
       return false;
@@ -165,12 +169,14 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
   /** Returns a hash code value for this object.
    * Only uses the first 4 bytes, since md5s are evenly distributed.
    */
+  @Override
   public int hashCode() {
     return quarterDigest();
   }
 
   /** Compares this object with the specified object for order.*/
+  @Override
   public int compareTo(MD5Hash that) {
     return WritableComparator.compareBytes(this.digest, 0, MD5_LEN,
                                            that.digest, 0, MD5_LEN);
@@ -182,6 +188,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
       super(MD5Hash.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN);
@@ -196,6 +203,7 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 
   /** Returns a string representation of this object. */
+  @Override
   public String toString() {
     StringBuilder buf = new StringBuilder(MD5_LEN*2);
     for (int i = 0; i < MD5_LEN; i++) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 9c14402d759..7e7d855f827 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -296,6 +296,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       data.close();
       index.close();
@@ -723,6 +724,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       if (!indexClosed) {
         index.close();
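Note (illustrative aside, not part of the patch): the LongWritable.Comparator hunk above shows the raw-comparator shape, comparing serialized keys directly from their bytes without deserializing a Writable. A sketch of the same shape; the class name ReverseLongComparator is hypothetical:

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.WritableComparator;

    // Hypothetical comparator: orders serialized long keys in descending
    // order, reading them straight out of the byte buffers.
    public class ReverseLongComparator extends WritableComparator {
      public ReverseLongComparator() {
        super(LongWritable.class);
      }

      @Override  // overrides WritableComparator.compare(byte[],int,int,byte[],int,int)
      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        long thisValue = readLong(b1, s1);  // static helper on WritableComparator
        long thatValue = readLong(b2, s2);
        return thisValue < thatValue ? 1 : (thisValue == thatValue ? 0 : -1);
      }
    }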
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
index 377c9c1656a..72c7098d7af 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
@@ -55,27 +55,27 @@ public class MapWritable extends AbstractMapWritable
     copy(other);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Map.Entry<Writable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean equals(Object obj) {
     if (this == obj) {
       return true;
@@ -93,27 +93,27 @@ public class MapWritable extends AbstractMapWritable
     return false;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return 1 + this.instance.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Writable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
@@ -121,31 +121,30 @@ public class MapWritable extends AbstractMapWritable
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends Writable, ? extends Writable> t) {
     for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
       put(e.getKey(), e.getValue());
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
 
   // Writable
 
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
@@ -164,7 +163,6 @@ public class MapWritable extends AbstractMapWritable
     }
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
index beb7b17ce72..77c590fdb63 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
@@ -35,6 +35,7 @@ public class NullWritable implements WritableComparable<NullWritable> {
   /** Returns the single instance of this class. */
   public static NullWritable get() { return THIS; }
 
+  @Override
   public String toString() {
     return "(null)";
   }
@@ -46,8 +47,11 @@ public class NullWritable implements WritableComparable<NullWritable> {
   public int compareTo(NullWritable other) { return 0; }
+  @Override
   public boolean equals(Object other) { return other instanceof NullWritable; }
+  @Override
   public void readFields(DataInput in) throws IOException {}
+  @Override
   public void write(DataOutput out) throws IOException {}
 
   /** A Comparator "optimized" for NullWritable. */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
index c5551110976..0f0f5c7405a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
@@ -66,15 +66,18 @@ public class ObjectWritable implements Writable, Configurable {
     this.instance = instance;
   }
 
+  @Override
   public String toString() {
     return "OW[class=" + declaredClass + ",value=" + instance + "]";
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     readObject(in, this, this.conf);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     writeObject(out, instance, declaredClass, conf);
   }
@@ -99,6 +102,7 @@ public class ObjectWritable implements Writable, Configurable {
       super(conf);
       this.declaredClass = declaredClass;
     }
+    @Override
     public void readFields(DataInput in) throws IOException {
       String className = UTF8.readString(in);
       declaredClass = PRIMITIVE_NAMES.get(className);
@@ -110,6 +114,7 @@ public class ObjectWritable implements Writable, Configurable {
         }
       }
     }
+    @Override
     public void write(DataOutput out) throws IOException {
       UTF8.writeString(out, declaredClass.getName());
     }
@@ -375,10 +380,12 @@ public class ObjectWritable implements Writable, Configurable {
     return declaredClass;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
 
+  @Override
   public Configuration getConf() {
     return this.conf;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
index b7605db9a9c..15a396dc2bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
@@ -50,6 +50,7 @@ public class OutputBuffer extends FilterOutputStream {
   private static class Buffer extends ByteArrayOutputStream {
     public byte[] getData() { return buf; }
     public int getLength() { return count; }
+    @Override
     public void reset() { count = 0; }
 
     public void write(InputStream in, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
index 046d9e4b736..f1545b69c90 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
@@ -194,6 +194,7 @@ public class ReadaheadPool {
       this.len = len;
     }
 
+    @Override
     public void run() {
       if (canceled) return;
       // There's a very narrow race here that the file will close right at
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
index 6bc798e7e3b..b30c4a4da44 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
@@ -24,7 +24,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 6ffa436cea0..0388acaeafb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -625,15 +625,18 @@ public class SequenceFile {
       dataSize = length;
     }
 
+    @Override
     public int getSize() {
       return dataSize;
     }
 
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       outStream.write(data, 0, dataSize);
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream)
       throws IllegalArgumentException, IOException {
       throw
@@ -666,10 +669,12 @@ public class SequenceFile {
       dataSize = length;
     }
 
+    @Override
     public int getSize() {
       return dataSize;
     }
 
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       if (decompressedStream == null) {
@@ -687,6 +692,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream)
       throws IllegalArgumentException, IOException {
       outStream.write(data, 0, dataSize);
@@ -728,6 +734,7 @@ public class SequenceFile {
       return new TreeMap<Text, Text>(this.theMetadata);
     }
 
+    @Override
     public void write(DataOutput out) throws IOException {
       out.writeInt(this.theMetadata.size());
       Iterator<Map.Entry<Text, Text>> iter =
@@ -739,6 +746,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void readFields(DataInput in) throws IOException {
       int sz = in.readInt();
       if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object");
@@ -752,6 +760,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
    public boolean equals(Object other) {
      if (other == null) {
        return false;
@@ -788,11 +797,13 @@ public class SequenceFile {
       return true;
     }
 
+    @Override
     public int hashCode() {
       assert false : "hashCode not designed";
       return 42; // any arbitrary constant will do
     }
 
+    @Override
     public String toString() {
       StringBuilder sb = new StringBuilder();
       sb.append("size: ").append(this.theMetadata.size()).append("\n");
@@ -1250,6 +1261,7 @@ public class SequenceFile {
     Configuration getConf() { return conf; }
 
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       keySerializer.close();
       uncompressedValSerializer.close();
@@ -1360,6 +1372,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1392,6 +1405,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
@@ -1449,6 +1463,7 @@ public class SequenceFile {
     }
 
     /** Compress and flush contents to dfs */
+    @Override
     public synchronized void sync() throws IOException {
       if (noBufferedRecords > 0) {
         super.sync();
@@ -1478,6 +1493,7 @@ public class SequenceFile {
     }
 
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       if (out != null) {
         sync();
@@ -1486,6 +1502,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1518,6 +1535,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
@@ -1960,6 +1978,7 @@ public class SequenceFile {
     }
 
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       // Return the decompressors to the pool
       CodecPool.returnDecompressor(keyLenDecompressor);
@@ -2618,6 +2637,7 @@ public class SequenceFile {
     }
 
     /** Returns the name of the file. */
+    @Override
     public String toString() {
       return filename;
     }
@@ -2948,6 +2968,7 @@ public class SequenceFile {
       mergeSort.mergeSort(pointersCopy, pointers, 0, count);
     }
     class SeqFileComparator implements Comparator<IntWritable> {
+      @Override
       public int compare(IntWritable I, IntWritable J) {
         return comparator.compare(rawBuffer, keyOffsets[I.get()],
                                   keyLengths[I.get()], rawBuffer,
@@ -3221,6 +3242,7 @@ public class SequenceFile {
         this.tmpDir = tmpDir;
         this.progress = progress;
       }
+      @Override
       protected boolean lessThan(Object a, Object b) {
         // indicate we're making progress
         if (progress != null) {
@@ -3232,6 +3254,7 @@ public class SequenceFile {
           msa.getKey().getLength(), msb.getKey().getData(), 0,
           msb.getKey().getLength()) < 0;
       }
+      @Override
       public void close() throws IOException {
         SegmentDescriptor ms;                           // close inputs
         while ((ms = (SegmentDescriptor)pop()) != null) {
@@ -3239,12 +3262,15 @@ public class SequenceFile {
         }
         minSegment = null;
       }
+      @Override
       public DataOutputBuffer getKey() throws IOException {
         return rawKey;
       }
+      @Override
       public ValueBytes getValue() throws IOException {
         return rawValue;
       }
+      @Override
       public boolean next() throws IOException {
         if (size() == 0)
           return false;
@@ -3272,6 +3298,7 @@ public class SequenceFile {
         return true;
       }
 
+      @Override
       public Progress getProgress() {
         return mergeProgress;
       }
@@ -3469,6 +3496,7 @@ public class SequenceFile {
         return preserveInput;
       }
 
+      @Override
       public int compareTo(Object o) {
         SegmentDescriptor that = (SegmentDescriptor)o;
         if (this.segmentLength != that.segmentLength) {
@@ -3481,6 +3509,7 @@ public class SequenceFile {
           compareTo(that.segmentPathName.toString());
       }
 
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof SegmentDescriptor)) {
           return false;
@@ -3495,6 +3524,7 @@ public class SequenceFile {
         return false;
       }
 
+      @Override
       public int hashCode() {
         return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
       }
@@ -3584,12 +3614,14 @@ public class SequenceFile {
       /** The default cleanup. Subclasses can override this with a custom
        * cleanup
        */
+      @Override
       public void cleanup() throws IOException {
         super.close();
         if (super.shouldPreserveInput()) return;
         parentContainer.cleanup();
       }
 
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof LinkedSegmentsDescriptor)) {
           return false;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
index 9ba0023190a..068ca9d40e5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
@@ -87,6 +87,7 @@ public class SetFile extends MapFile {
     }
 
     // javadoc inherited
+    @Override
     public boolean seek(WritableComparable key)
       throws IOException {
       return super.seek(key);
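Note (illustrative aside, not part of the patch): the MD5Hash hunk above and the Text hunk below annotate ThreadLocal.initialValue(), an override of a protected method of a JDK class, which is exactly where the annotation earns its keep. The same pattern in miniature; the DigestHolder class is hypothetical:

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Hypothetical holder; mirrors the DIGESTER_FACTORY field in MD5Hash.
    public class DigestHolder {
      private static final ThreadLocal<MessageDigest> DIGESTER =
          new ThreadLocal<MessageDigest>() {
            @Override  // overrides the protected ThreadLocal.initialValue()
            protected MessageDigest initialValue() {
              try {
                return MessageDigest.getInstance("MD5");  // one digest per thread
              } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e);
              }
            }
          };

      public static MessageDigest getDigester() {
        MessageDigest digester = DIGESTER.get();
        digester.reset();  // reuse the per-thread instance safely
        return digester;
      }
    }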
@Override public void readFields(DataInput in) throws IOException { @@ -187,7 +186,6 @@ public class SortedMapWritable extends AbstractMapWritable } } - /** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java index a7ee6876d40..949b14ae577 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java @@ -54,6 +54,7 @@ public interface Stringifier extends java.io.Closeable { * Closes this object. * @throws IOException if an I/O error occurs * */ + @Override public void close() throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java index a4f80ea8864..95fb174a9d7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java @@ -55,6 +55,7 @@ public class Text extends BinaryComparable private static ThreadLocal ENCODER_FACTORY = new ThreadLocal() { + @Override protected CharsetEncoder initialValue() { return Charset.forName("UTF-8").newEncoder(). onMalformedInput(CodingErrorAction.REPORT). @@ -64,6 +65,7 @@ public class Text extends BinaryComparable private static ThreadLocal DECODER_FACTORY = new ThreadLocal() { + @Override protected CharsetDecoder initialValue() { return Charset.forName("UTF-8").newDecoder(). onMalformedInput(CodingErrorAction.REPORT). @@ -112,11 +114,13 @@ public class Text extends BinaryComparable * valid. Please use {@link #copyBytes()} if you * need the returned array to be precisely the length of the data. */ + @Override public byte[] getBytes() { return bytes; } /** Returns the number of bytes in the byte array */ + @Override public int getLength() { return length; } @@ -281,6 +285,7 @@ public class Text extends BinaryComparable /** deserialize */ + @Override public void readFields(DataInput in) throws IOException { int newLength = WritableUtils.readVInt(in); setCapacity(newLength, false); @@ -313,6 +318,7 @@ public class Text extends BinaryComparable * length uses zero-compressed encoding * @see Writable#write(DataOutput) */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, length); out.write(bytes, 0, length); @@ -329,6 +335,7 @@ public class Text extends BinaryComparable } /** Returns true iff o is a Text with the same contents. 
*/ + @Override public boolean equals(Object o) { if (o instanceof Text) return super.equals(o); @@ -346,6 +353,7 @@ public class Text extends BinaryComparable super(Text.class); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { int n1 = WritableUtils.decodeVIntSize(b1[s1]); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java index 76304623eef..cf8947d32d0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java @@ -57,6 +57,7 @@ public class TwoDArrayWritable implements Writable { public Writable[][] get() { return values; } + @Override public void readFields(DataInput in) throws IOException { // construct matrix values = new Writable[in.readInt()][]; @@ -81,6 +82,7 @@ public class TwoDArrayWritable implements Writable { } } + @Override public void write(DataOutput out) throws IOException { out.writeInt(values.length); // write values for (int i = 0; i < values.length; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index 6a0f88673fe..ef7512996c7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -110,6 +110,7 @@ public class UTF8 implements WritableComparable { System.arraycopy(other.bytes, 0, bytes, 0, length); } + @Override public void readFields(DataInput in) throws IOException { length = in.readUnsignedShort(); if (bytes == null || bytes.length < length) @@ -123,6 +124,7 @@ public class UTF8 implements WritableComparable { WritableUtils.skipFully(in, length); } + @Override public void write(DataOutput out) throws IOException { out.writeShort(length); out.write(bytes, 0, length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java index e37b144dbff..f537524c4b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java @@ -43,10 +43,12 @@ public class VIntWritable implements WritableComparable { /** Return the value of this VIntWritable. */ public int get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = WritableUtils.readVInt(in); } + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java index 869bf43914e..a9fac30605b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java @@ -43,10 +43,12 @@ public class VLongWritable implements WritableComparable { /** Return the value of this LongWritable. 
*/ public long get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = WritableUtils.readVLong(in); } + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVLong(out, value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java index 162374be21d..a72be58832d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java @@ -39,6 +39,7 @@ public class VersionMismatchException extends IOException { } /** Returns a string representation of this object. */ + @Override public String toString(){ return "A record version mismatch occured. Expecting v" + expectedVersion + ", found v" + foundVersion; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java index a197fd2e4f9..c2db55520c9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java @@ -40,11 +40,13 @@ public abstract class VersionedWritable implements Writable { public abstract byte getVersion(); // javadoc from Writable + @Override public void write(DataOutput out) throws IOException { out.writeByte(getVersion()); // store version } // javadoc from Writable + @Override public void readFields(DataInput in) throws IOException { byte version = in.readByte(); // read version if (version != getVersion()) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java index 6eb3a21443b..eb3c8d322c2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java @@ -120,6 +120,7 @@ public class WritableComparator implements RawComparator { * Writable#readFields(DataInput)}, then calls {@link * #compare(WritableComparable,WritableComparable)}. 
*/ + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { try { buffer.reset(b1, s1, l1); // parse key1 @@ -144,6 +145,7 @@ public class WritableComparator implements RawComparator { return a.compareTo(b); } + @Override public int compare(Object a, Object b) { return compare((WritableComparable)a, (WritableComparable)b); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java index a7a925f35a1..35f7cb43ea0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java @@ -63,6 +63,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * @throws java.io.IOException * Throws IO exception */ + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return new BZip2CompressionOutputStream(out); @@ -74,6 +75,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * @return CompressionOutputStream @throws java.io.IOException */ + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { return createOutputStream(out); @@ -84,6 +86,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return BZip2DummyCompressor.class */ + @Override public Class getCompressorType() { return BZip2DummyCompressor.class; } @@ -93,6 +96,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return Compressor */ + @Override public Compressor createCompressor() { return new BZip2DummyCompressor(); } @@ -106,6 +110,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * @throws java.io.IOException * Throws IOException */ + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return new BZip2CompressionInputStream(in); @@ -116,6 +121,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return CompressionInputStream */ + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { return createInputStream(in); @@ -133,6 +139,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return CompressionInputStream for BZip2 aligned at block boundaries */ + @Override public SplitCompressionInputStream createInputStream(InputStream seekableIn, Decompressor decompressor, long start, long end, READ_MODE readMode) throws IOException { @@ -181,6 +188,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return BZip2DummyDecompressor.class */ + @Override public Class getDecompressorType() { return BZip2DummyDecompressor.class; } @@ -190,6 +198,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return Decompressor */ + @Override public Decompressor createDecompressor() { return new BZip2DummyDecompressor(); } @@ -199,6 +208,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * * @return A String telling the default bzip2 file extension */ + @Override public String getDefaultExtension() { return ".bz2"; } @@ -226,6 +236,7 @@ public class BZip2Codec implements SplittableCompressionCodec { } } + @Override public void finish() throws IOException { if (needsReset) { // In the case that nothing is written to this stream, we still 
need to @@ -245,12 +256,14 @@ public class BZip2Codec implements SplittableCompressionCodec { } } + @Override public void resetState() throws IOException { // Cannot write to out at this point because out might not be ready // yet, as in SequenceFile.Writer implementation. needsReset = true; } + @Override public void write(int b) throws IOException { if (needsReset) { internalReset(); @@ -258,6 +271,7 @@ public class BZip2Codec implements SplittableCompressionCodec { this.output.write(b); } + @Override public void write(byte[] b, int off, int len) throws IOException { if (needsReset) { internalReset(); @@ -265,6 +279,7 @@ public class BZip2Codec implements SplittableCompressionCodec { this.output.write(b, off, len); } + @Override public void close() throws IOException { if (needsReset) { // In the case that nothing is written to this stream, we still need to @@ -382,6 +397,7 @@ public class BZip2Codec implements SplittableCompressionCodec { }// end of method + @Override public void close() throws IOException { if (!needsReset) { input.close(); @@ -417,6 +433,7 @@ public class BZip2Codec implements SplittableCompressionCodec { * */ + @Override public int read(byte[] b, int off, int len) throws IOException { if (needsReset) { internalReset(); @@ -440,6 +457,7 @@ public class BZip2Codec implements SplittableCompressionCodec { } + @Override public int read() throws IOException { byte b[] = new byte[1]; int result = this.read(b, 0, 1); @@ -454,6 +472,7 @@ public class BZip2Codec implements SplittableCompressionCodec { } } + @Override public void resetState() throws IOException { // Cannot read from bufferedIn at this point because bufferedIn // might not be ready @@ -461,6 +480,7 @@ public class BZip2Codec implements SplittableCompressionCodec { needsReset = true; } + @Override public long getPos() { return this.compressedStreamPosition; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java index 5d854861f24..434183bbc25 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java @@ -78,6 +78,7 @@ public class BlockCompressorStream extends CompressorStream { * Each block contains the uncompressed length for the block, followed by * one or more length-prefixed blocks of compressed data. 
*/ + @Override public void write(byte[] b, int off, int len) throws IOException { // Sanity checks if (compressor.finished()) { @@ -132,6 +133,7 @@ public class BlockCompressorStream extends CompressorStream { } } + @Override public void finish() throws IOException { if (!compressor.finished()) { rawWriteInt((int)compressor.getBytesRead()); @@ -142,6 +144,7 @@ public class BlockCompressorStream extends CompressorStream { } } + @Override protected void compress() throws IOException { int len = compressor.compress(buffer, 0, buffer.length); if (len > 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java index 42ade89019c..7d2504e3e22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java @@ -65,6 +65,7 @@ public class BlockDecompressorStream extends DecompressorStream { super(in); } + @Override protected int decompress(byte[] b, int off, int len) throws IOException { // Check if we are the beginning of a block if (noUncompressedBytes == originalBlockSize) { @@ -104,6 +105,7 @@ public class BlockDecompressorStream extends DecompressorStream { return n; } + @Override protected int getCompressedData() throws IOException { checkStream(); @@ -126,6 +128,7 @@ public class BlockDecompressorStream extends DecompressorStream { return len; } + @Override public void resetState() throws IOException { originalBlockSize = 0; noUncompressedBytes = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index dc95e9e999d..57fb366bdd0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -75,6 +75,7 @@ public class CompressionCodecFactory { /** * Print the extension map out as a string. */ + @Override public String toString() { StringBuilder buf = new StringBuilder(); Iterator> itr = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java index 4f7757dfedc..4491819d72c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java @@ -55,6 +55,7 @@ public abstract class CompressionInputStream extends InputStream implements Seek this.in = in; } + @Override public void close() throws IOException { in.close(); } @@ -63,6 +64,7 @@ public abstract class CompressionInputStream extends InputStream implements Seek * Read bytes from the stream. * Made abstract to prevent leakage to underlying stream. 
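
The BlockCompressorStream/BlockDecompressorStream hunks above operate on the framing spelled out in the write() javadoc: an uncompressed block length followed by one or more length-prefixed chunks of compressed data. A hedged reader-side sketch (big-endian ints, as rawWriteInt appears to emit; decompressChunk is a hypothetical helper; rawIn assumed in scope):

    import java.io.DataInputStream;

    DataInputStream in = new DataInputStream(rawIn);
    int originalBlockSize = in.readInt();    // uncompressed size of the block
    int produced = 0;
    while (produced < originalBlockSize) {
      int chunkLen = in.readInt();           // one length-prefixed chunk
      byte[] chunk = new byte[chunkLen];
      in.readFully(chunk);
      produced += decompressChunk(chunk);    // hypothetical helper
    }
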
*/ + @Override public abstract int read(byte[] b, int off, int len) throws IOException; /** @@ -76,6 +78,7 @@ public abstract class CompressionInputStream extends InputStream implements Seek * * @return Current position in stream as a long */ + @Override public long getPos() throws IOException { if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)){ //This way of getting the current position will not work for file @@ -95,6 +98,7 @@ public abstract class CompressionInputStream extends InputStream implements Seek * @throws UnsupportedOperationException */ + @Override public void seek(long pos) throws UnsupportedOperationException { throw new UnsupportedOperationException(); } @@ -104,6 +108,7 @@ public abstract class CompressionInputStream extends InputStream implements Seek * * @throws UnsupportedOperationException */ + @Override public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException { throw new UnsupportedOperationException(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java index b4a47946b2d..9bd6b84f988 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java @@ -44,11 +44,13 @@ public abstract class CompressionOutputStream extends OutputStream { this.out = out; } + @Override public void close() throws IOException { finish(); out.close(); } + @Override public void flush() throws IOException { out.flush(); } @@ -57,6 +59,7 @@ public abstract class CompressionOutputStream extends OutputStream { * Write compressed bytes to the stream. * Made abstract to prevent leakage to underlying stream. 
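
For context on the CompressionInputStream hunks above: callers rarely construct these streams directly; CompressionCodecFactory (patched earlier in this file) maps a file suffix to a codec, which then wraps the raw stream. A minimal sketch with an illustrative path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path file = new Path("/logs/part-00000.bz2");        // illustrative path
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(file);     // matched by suffix
    if (codec != null) {
      java.io.InputStream in = codec.createInputStream(fs.open(file));
      // read decompressed bytes from 'in'; seek() on it would throw,
      // as the hunk above documents
    }
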
*/ + @Override public abstract void write(byte[] b, int off, int len) throws IOException; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java index 4cd7425ba63..84f1b2f179b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java @@ -59,6 +59,7 @@ public class CompressorStream extends CompressionOutputStream { super(out); } + @Override public void write(byte[] b, int off, int len) throws IOException { // Sanity checks if (compressor.finished()) { @@ -83,6 +84,7 @@ public class CompressorStream extends CompressionOutputStream { } } + @Override public void finish() throws IOException { if (!compressor.finished()) { compressor.finish(); @@ -92,10 +94,12 @@ public class CompressorStream extends CompressionOutputStream { } } + @Override public void resetState() throws IOException { compressor.reset(); } + @Override public void close() throws IOException { if (!closed) { finish(); @@ -105,6 +109,7 @@ public class CompressorStream extends CompressionOutputStream { } private byte[] oneByte = new byte[1]; + @Override public void write(int b) throws IOException { oneByte[0] = (byte)(b & 0xff); write(oneByte, 0, oneByte.length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java index d0ef6ee6d3b..16e0ad763ac 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java @@ -66,11 +66,13 @@ public class DecompressorStream extends CompressionInputStream { } private byte[] oneByte = new byte[1]; + @Override public int read() throws IOException { checkStream(); return (read(oneByte, 0, oneByte.length) == -1) ? -1 : (oneByte[0] & 0xff); } + @Override public int read(byte[] b, int off, int len) throws IOException { checkStream(); @@ -163,11 +165,13 @@ public class DecompressorStream extends CompressionInputStream { } } + @Override public void resetState() throws IOException { decompressor.reset(); } private byte[] skipBytes = new byte[512]; + @Override public long skip(long n) throws IOException { // Sanity checks if (n < 0) { @@ -189,11 +193,13 @@ public class DecompressorStream extends CompressionInputStream { return skipped; } + @Override public int available() throws IOException { checkStream(); return (eof) ? 
0 : 1; } + @Override public void close() throws IOException { if (!closed) { in.close(); @@ -201,13 +207,16 @@ public class DecompressorStream extends CompressionInputStream { } } + @Override public boolean markSupported() { return false; } + @Override public synchronized void mark(int readlimit) { } + @Override public synchronized void reset() throws IOException { throw new IOException("mark/reset not supported"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java index 1be28bfce36..ea7df20de35 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java @@ -37,14 +37,17 @@ public class DefaultCodec implements Configurable, CompressionCodec { Configuration conf; + @Override public void setConf(Configuration conf) { this.conf = conf; } + @Override public Configuration getConf() { return conf; } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { // This may leak memory if called in a loop. The createCompressor() call @@ -57,6 +60,7 @@ public class DefaultCodec implements Configurable, CompressionCodec { conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { @@ -64,20 +68,24 @@ public class DefaultCodec implements Configurable, CompressionCodec { conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Class getCompressorType() { return ZlibFactory.getZlibCompressorType(conf); } + @Override public Compressor createCompressor() { return ZlibFactory.getZlibCompressor(conf); } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return new DecompressorStream(in, createDecompressor(), conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { @@ -85,14 +93,17 @@ public class DefaultCodec implements Configurable, CompressionCodec { conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Class getDecompressorType() { return ZlibFactory.getZlibDecompressorType(conf); } + @Override public Decompressor createDecompressor() { return ZlibFactory.getZlibDecompressor(conf); } + @Override public String getDefaultExtension() { return ".deflate"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java index b17fe4b39e5..520205e1660 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java @@ -20,15 +20,11 @@ package org.apache.hadoop.io.compress; import java.io.*; import java.util.zip.GZIPOutputStream; -import java.util.zip.GZIPInputStream; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.zlib.*; -import 
org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel; -import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy; /** * This class creates gzip compressors/decompressors. @@ -66,32 +62,39 @@ public class GzipCodec extends DefaultCodec { super(out); } + @Override public void close() throws IOException { out.close(); } + @Override public void flush() throws IOException { out.flush(); } + @Override public void write(int b) throws IOException { out.write(b); } + @Override public void write(byte[] data, int offset, int length) throws IOException { out.write(data, offset, length); } + @Override public void finish() throws IOException { ((ResetableGZIPOutputStream) out).finish(); } + @Override public void resetState() throws IOException { ((ResetableGZIPOutputStream) out).resetState(); } } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return (ZlibFactory.isNativeZlibLoaded(conf)) ? @@ -100,6 +103,7 @@ public class GzipCodec extends DefaultCodec { new GzipOutputStream(out); } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { @@ -110,23 +114,27 @@ public class GzipCodec extends DefaultCodec { createOutputStream(out); } + @Override public Compressor createCompressor() { return (ZlibFactory.isNativeZlibLoaded(conf)) ? new GzipZlibCompressor(conf) : null; } + @Override public Class getCompressorType() { return ZlibFactory.isNativeZlibLoaded(conf) ? GzipZlibCompressor.class : null; } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return createInputStream(in, null); } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { @@ -137,18 +145,21 @@ public class GzipCodec extends DefaultCodec { conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Decompressor createDecompressor() { return (ZlibFactory.isNativeZlibLoaded(conf)) ? new GzipZlibDecompressor() : new BuiltInGzipDecompressor(); } + @Override public Class getDecompressorType() { return ZlibFactory.isNativeZlibLoaded(conf) ? 
GzipZlibDecompressor.class : BuiltInGzipDecompressor.class; } + @Override public String getDefaultExtension() { return ".gz"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java index 14cc9d5b825..00e892d8454 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java @@ -338,6 +338,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants { } + @Override public int read() throws IOException { if (this.in != null) { @@ -372,6 +373,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants { */ + @Override public int read(final byte[] dest, final int offs, final int len) throws IOException { if (offs < 0) { @@ -574,6 +576,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants { } } + @Override public void close() throws IOException { InputStream inShadow = this.in; if (inShadow != null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java index 3060eb924f4..ca4e5cd0df5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java @@ -639,6 +639,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { init(); } + @Override public void write(final int b) throws IOException { if (this.out != null) { write0(b); @@ -704,6 +705,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { /** * Overriden to close the stream. 
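
The finalize() override under the javadoc just above is only a last-resort guard: if a caller leaks the stream, finalization still calls finish() so buffered bzip2 output is flushed. Correct code should not rely on it; a deterministic sketch (rawOut and data assumed in scope):

    import java.io.OutputStream;
    import org.apache.hadoop.io.compress.bzip2.CBZip2OutputStream;

    OutputStream out = new CBZip2OutputStream(rawOut);
    try {
      out.write(data);
    } finally {
      out.close();   // flushes and finishes deterministically
    }
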
*/ + @Override protected void finalize() throws Throwable { finish(); super.finalize(); @@ -726,6 +728,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { } } + @Override public void close() throws IOException { if (out != null) { OutputStream outShadow = this.out; @@ -739,6 +742,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { } } + @Override public void flush() throws IOException { OutputStream outShadow = this.out; if (outShadow != null) { @@ -849,6 +853,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { return this.blockSize100k; } + @Override public void write(final byte[] buf, int offs, final int len) throws IOException { if (offs < 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 0cf65e51442..22a3118f5f9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -258,6 +258,7 @@ public class Lz4Decompressor implements Decompressor { return 0; } + @Override public synchronized void reset() { finished = false; compressedDirectBufLen = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index baf864094e0..4620092f084 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -257,6 +257,7 @@ public class SnappyDecompressor implements Decompressor { return 0; } + @Override public synchronized void reset() { finished = false; compressedDirectBufLen = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java index 1e5525e743b..41f8036fda4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java @@ -122,7 +122,7 @@ public class BuiltInGzipDecompressor implements Decompressor { // in the first buffer load? (But how else would one do it?) } - /** {@inheritDoc} */ + @Override public synchronized boolean needsInput() { if (state == GzipStateLabel.DEFLATE_STREAM) { // most common case return inflater.needsInput(); @@ -144,6 +144,7 @@ public class BuiltInGzipDecompressor implements Decompressor { * the bulk deflate stream, which is a performance hit we don't want * to absorb. (Decompressor now documents this requirement.) */ + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b == null) { throw new NullPointerException(); @@ -175,6 +176,7 @@ public class BuiltInGzipDecompressor implements Decompressor { * methods below), the deflate stream is never copied; Inflater operates * directly on the user's buffer. 
    */
+  @Override
   public synchronized int decompress(byte[] b, int off, int len)
       throws IOException {
     int numAvailBytes = 0;
@@ -421,16 +423,17 @@
    *
    * @return the total (non-negative) number of unprocessed bytes in input
    */
+  @Override
   public synchronized int getRemaining() {
     return userBufLen;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized boolean needsDictionary() {
     return inflater.needsDictionary();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized void setDictionary(byte[] b, int off, int len) {
     inflater.setDictionary(b, off, len);
   }
@@ -439,6 +442,7 @@
    * Returns true if the end of the gzip substream (single "member") has been
    * reached.
    */
+  @Override
   public synchronized boolean finished() {
     return (state == GzipStateLabel.FINISHED);
   }
@@ -447,6 +451,7 @@
    * Resets everything, including the input buffer, regardless of whether the
    * current gzip substream is finished.

*/ + @Override public synchronized void reset() { // could optionally emit INFO message if state != GzipStateLabel.FINISHED inflater.reset(); @@ -463,7 +468,7 @@ public class BuiltInGzipDecompressor implements Decompressor { hasHeaderCRC = false; } - /** {@inheritDoc} */ + @Override public synchronized void end() { inflater.end(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java index b269d557b76..509456e8347 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java @@ -48,6 +48,7 @@ public class BuiltInZlibDeflater extends Deflater implements Compressor { super(); } + @Override public synchronized int compress(byte[] b, int off, int len) throws IOException { return super.deflate(b, off, len); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java index 0223587ad01..4fda6723b86 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java @@ -39,6 +39,7 @@ public class BuiltInZlibInflater extends Inflater implements Decompressor { super(); } + @Override public synchronized int decompress(byte[] b, int off, int len) throws IOException { try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index 8839bc98fa0..c0d0d699a55 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -259,6 +259,7 @@ public class ZlibCompressor implements Compressor { } } + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b== null) { throw new NullPointerException(); @@ -287,6 +288,7 @@ public class ZlibCompressor implements Compressor { uncompressedDirectBufLen = uncompressedDirectBuf.position(); } + @Override public synchronized void setDictionary(byte[] b, int off, int len) { if (stream == 0 || b == null) { throw new NullPointerException(); @@ -297,6 +299,7 @@ public class ZlibCompressor implements Compressor { setDictionary(stream, b, off, len); } + @Override public synchronized boolean needsInput() { // Consume remaining compressed data? 
if (compressedDirectBuf.remaining() > 0) { @@ -325,16 +328,19 @@ public class ZlibCompressor implements Compressor { return false; } + @Override public synchronized void finish() { finish = true; } + @Override public synchronized boolean finished() { // Check if 'zlib' says its 'finished' and // all compressed data has been consumed return (finished && compressedDirectBuf.remaining() == 0); } + @Override public synchronized int compress(byte[] b, int off, int len) throws IOException { if (b == null) { @@ -385,6 +391,7 @@ public class ZlibCompressor implements Compressor { * * @return the total (non-negative) number of compressed bytes output so far */ + @Override public synchronized long getBytesWritten() { checkStream(); return getBytesWritten(stream); @@ -395,11 +402,13 @@ public class ZlibCompressor implements Compressor { * * @return the total (non-negative) number of uncompressed bytes input so far */ + @Override public synchronized long getBytesRead() { checkStream(); return getBytesRead(stream); } + @Override public synchronized void reset() { checkStream(); reset(stream); @@ -413,6 +422,7 @@ public class ZlibCompressor implements Compressor { userBufOff = userBufLen = 0; } + @Override public synchronized void end() { if (stream != 0) { end(stream); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java index 2db70551e83..ba675719983 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java @@ -118,6 +118,7 @@ public class ZlibDecompressor implements Decompressor { this(CompressionHeader.DEFAULT_HEADER, DEFAULT_DIRECT_BUFFER_SIZE); } + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b == null) { throw new NullPointerException(); @@ -154,6 +155,7 @@ public class ZlibDecompressor implements Decompressor { userBufLen -= compressedDirectBufLen; } + @Override public synchronized void setDictionary(byte[] b, int off, int len) { if (stream == 0 || b == null) { throw new NullPointerException(); @@ -165,6 +167,7 @@ public class ZlibDecompressor implements Decompressor { needDict = false; } + @Override public synchronized boolean needsInput() { // Consume remaining compressed data? if (uncompressedDirectBuf.remaining() > 0) { @@ -184,16 +187,19 @@ public class ZlibDecompressor implements Decompressor { return false; } + @Override public synchronized boolean needsDictionary() { return needDict; } + @Override public synchronized boolean finished() { // Check if 'zlib' says it's 'finished' and // all compressed data has been consumed return (finished && uncompressedDirectBuf.remaining() == 0); } + @Override public synchronized int decompress(byte[] b, int off, int len) throws IOException { if (b == null) { @@ -255,6 +261,7 @@ public class ZlibDecompressor implements Decompressor { * * @return the total (non-negative) number of unprocessed bytes in input */ + @Override public synchronized int getRemaining() { checkStream(); return userBufLen + getRemaining(stream); // userBuf + compressedDirectBuf @@ -263,6 +270,7 @@ public class ZlibDecompressor implements Decompressor { /** * Resets everything including the input buffers (user and direct).
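
The reset() javadoc above matters because these native-backed (de)compressors are routinely pooled: clearing both the user-facing and the direct (native) buffers is what makes a recycled instance safe. A sketch with CodecPool, assuming codec and rawIn in scope:

    import java.io.InputStream;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.Decompressor;

    Decompressor decomp = CodecPool.getDecompressor(codec);
    try {
      InputStream in = codec.createInputStream(rawIn, decomp);
      // consume 'in' ...
    } finally {
      CodecPool.returnDecompressor(decomp);  // the pool reset()s it for reuse
    }
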

*/ + @Override public synchronized void reset() { checkStream(); reset(stream); @@ -274,6 +282,7 @@ public class ZlibDecompressor implements Decompressor { userBufOff = userBufLen = 0; } + @Override public synchronized void end() { if (stream != 0) { end(stream); @@ -281,6 +290,7 @@ public class ZlibDecompressor implements Decompressor { } } + @Override protected void finalize() { end(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java index 6b4fdd89aa2..ce932665742 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java @@ -300,6 +300,7 @@ final class BCFile { * Close the BCFile Writer. Attempting to use the Writer after calling * close is not allowed and may lead to undetermined results. */ + @Override public void close() throws IOException { if (closed == true) { return; @@ -447,6 +448,7 @@ final class BCFile { this.compressAlgo = compressAlgo; } + @Override public void register(long raw, long begin, long end) { metaIndex.addEntry(new MetaIndexEntry(name, compressAlgo, new BlockRegion(begin, end - begin, raw))); @@ -463,6 +465,7 @@ final class BCFile { // do nothing } + @Override public void register(long raw, long begin, long end) { dataIndex.addBlockRegion(new BlockRegion(begin, end - begin, raw)); } @@ -671,6 +674,7 @@ final class BCFile { /** * Finishing reading the BCFile. Release all resources. */ + @Override public void close() { // nothing to be done now } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java index a9cb1ec1c3d..0808711f89a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java @@ -68,6 +68,7 @@ class CompareUtils { magnitude = m; } + @Override public long magnitude() { return magnitude; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 0b9ed9d2b39..9a57581c90c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -297,6 +297,7 @@ public class TFile { * * The underlying FSDataOutputStream is not closed. */ + @Override public void close() throws IOException { if ((state == State.CLOSED)) { return; @@ -820,6 +821,7 @@ public class TFile { * Close the reader. The state of the Reader object is undefined after * close. Calling close() for multiple times has no effect. */ + @Override public void close() throws IOException { readerBCF.close(); } @@ -1573,6 +1575,7 @@ public class TFile { * scanner after calling close is not defined. The entry returned by the * previous entry() call will be invalid. 
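
The TFile close() hunks above each carry a lifecycle caveat: the Writer leaves the underlying FSDataOutputStream open, Reader.close() is idempotent, and closing a Scanner invalidates its last entry. A hedged read-path sketch (constructor and method shapes as they appear on this branch; fs, path, and conf assumed in scope):

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.io.file.tfile.TFile;

    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader =
        new TFile.Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
    TFile.Reader.Scanner scanner = reader.createScanner();
    try {
      // iterate entries with the scanner ...
    } finally {
      scanner.close();  // parks the cursor at end; prior entry() is invalid
      reader.close();   // safe to call more than once
      fsdis.close();    // the caller owns the underlying stream
    }
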
*/ + @Override public void close() throws IOException { parkCursorAtEnd(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index 2a7f883d957..4cfa0761edc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -202,6 +202,7 @@ public class NativeIO { this.mode = mode; } + @Override public String toString() { return "Stat(owner='" + owner + "', group='" + group + "'" + ", mode=" + mode + ")"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java index 5064df5d861..db653b23f42 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java @@ -38,6 +38,7 @@ public class NativeIOException extends IOException { return errno; } + @Override public String toString() { return errno.toString() + ": " + super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index 8b8387ce2ce..5c29a33312d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -150,6 +150,7 @@ public class RetryPolicies { } static class TryOnceThenFail implements RetryPolicy { + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { return RetryAction.FAIL; @@ -157,6 +158,7 @@ public class RetryPolicies { } static class RetryForever implements RetryPolicy { + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { return RetryAction.RETRY; @@ -430,6 +432,7 @@ public class RetryPolicies { this.exceptionToPolicyMap = exceptionToPolicyMap; } + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { RetryPolicy policy = exceptionToPolicyMap.get(e.getClass()); @@ -457,6 +460,7 @@ public class RetryPolicies { } } + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { RetryPolicy policy = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java index 7e74cb77322..05205c5523c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java @@ -56,6 +56,7 @@ public abstract class DeserializerComparator implements RawComparator { this.deserializer.open(buffer); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { 
try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java index 61d6f171c9c..f08d0008c6e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java @@ -24,11 +24,8 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; -import java.util.Map; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.RawComparator; /** *

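
For context on the JavaSerialization class below: it takes effect only when registered under the io.serializations key, after which SerializationFactory hands out matching serializer/deserializer instances. A minimal sketch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.serializer.SerializationFactory;
    import org.apache.hadoop.io.serializer.Serializer;

    Configuration conf = new Configuration();
    conf.setStrings("io.serializations",
        "org.apache.hadoop.io.serializer.JavaSerialization");
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer<String> serializer = factory.getSerializer(String.class);
    // serializer.open(out); serializer.serialize("value"); serializer.close();
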
@@ -45,6 +42,7 @@ public class JavaSerialization implements Serialization { private ObjectInputStream ois; + @Override public void open(InputStream in) throws IOException { ois = new ObjectInputStream(in) { @Override protected void readStreamHeader() { @@ -53,6 +51,7 @@ public class JavaSerialization implements Serialization { }; } + @Override @SuppressWarnings("unchecked") public T deserialize(T object) throws IOException { try { @@ -63,6 +62,7 @@ public class JavaSerialization implements Serialization { } } + @Override public void close() throws IOException { ois.close(); } @@ -74,6 +74,7 @@ public class JavaSerialization implements Serialization { private ObjectOutputStream oos; + @Override public void open(OutputStream out) throws IOException { oos = new ObjectOutputStream(out) { @Override protected void writeStreamHeader() { @@ -82,27 +83,32 @@ public class JavaSerialization implements Serialization { }; } + @Override public void serialize(Serializable object) throws IOException { oos.reset(); // clear (class) back-references oos.writeObject(object); } + @Override public void close() throws IOException { oos.close(); } } + @Override @InterfaceAudience.Private public boolean accept(Class c) { return Serializable.class.isAssignableFrom(c); } + @Override @InterfaceAudience.Private public Deserializer getDeserializer(Class c) { return new JavaSerializationDeserializer(); } + @Override @InterfaceAudience.Private public Serializer getSerializer(Class c) { return new JavaSerializationSerializer(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java index 12927bea14d..f9bf692f1fc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java @@ -44,6 +44,7 @@ public class JavaSerializationComparator> super(new JavaSerialization.JavaSerializationDeserializer()); } + @Override @InterfaceAudience.Private public int compare(T o1, T o2) { return o1.compareTo(o2); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java index 8511d25bcde..ad965d6b2f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java @@ -23,8 +23,6 @@ import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.Map; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java index 1d5c068886c..f340cb3a98a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java @@ -47,11 +47,13 @@ public abstract class AvroSerialization extends Configured @InterfaceAudience.Private public static final String AVRO_SCHEMA_KEY = "Avro-Schema"; + @Override @InterfaceAudience.Private public Deserializer getDeserializer(Class c) { return new AvroDeserializer(c); } + @Override @InterfaceAudience.Private public Serializer getSerializer(Class c) { return new AvroSerializer(c); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index b0f5c93f757..de7af1b6b09 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -364,6 +364,7 @@ public class Client { * until a byte is read. * @throws IOException for any IO problem other than socket timeout */ + @Override public int read() throws IOException { do { try { @@ -380,6 +381,7 @@ public class Client { * * @return the total number of bytes read; -1 if the connection is closed. */ + @Override public int read(byte[] buf, int off, int len) throws IOException { do { try { @@ -510,6 +512,7 @@ public class Client { final Random rand, final UserGroupInformation ugi) throws IOException, InterruptedException { ugi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException, InterruptedException { final short MAX_BACKOFF = 5000; closeConnection(); @@ -803,6 +806,7 @@ public class Client { } } + @Override public void run() { if (LOG.isDebugEnabled()) LOG.debug(getName() + ": starting, having connections " diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index 245835af808..062699b97bb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -240,6 +240,7 @@ public class ProtobufRpcEngine implements RpcEngine { return returnMessage; } + @Override public void close() throws IOException { if (!isClosed) { isClosed = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java index f15c0837dc8..cc66958d14e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java @@ -19,7 +19,6 @@ package org.apache.hadoop.ipc; import java.io.IOException; -import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.HashSet; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java index 1cc269f6661..69d18eacd34 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java @@ -36,7 +36,8 @@ public class ProtocolSignature implements Writable { 
WritableFactories.setFactory (ProtocolSignature.class, new WritableFactory() { - public Writable newInstance() { return new ProtocolSignature(); } + @Override + public Writable newInstance() { return new ProtocolSignature(); } }); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index b1d289236ca..3083ba9a41f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -479,6 +479,7 @@ public abstract class Server { this.readSelector = Selector.open(); } + @Override public void run() { LOG.info("Starting " + getName()); try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index 1420361b9f6..3c8e7f1b9e6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -142,6 +142,7 @@ public class WritableRpcEngine implements RpcEngine { return rpcVersion; } + @Override @SuppressWarnings("deprecation") public void readFields(DataInput in) throws IOException { rpcVersion = in.readLong(); @@ -159,6 +160,7 @@ public class WritableRpcEngine implements RpcEngine { } } + @Override @SuppressWarnings("deprecation") public void write(DataOutput out) throws IOException { out.writeLong(rpcVersion); @@ -173,6 +175,7 @@ public class WritableRpcEngine implements RpcEngine { } } + @Override public String toString() { StringBuilder buffer = new StringBuilder(); buffer.append(methodName); @@ -189,10 +192,12 @@ public class WritableRpcEngine implements RpcEngine { return buffer.toString(); } + @Override public void setConf(Configuration conf) { this.conf = conf; } + @Override public Configuration getConf() { return this.conf; } @@ -215,6 +220,7 @@ public class WritableRpcEngine implements RpcEngine { this.client = CLIENTS.getClient(conf, factory); } + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { long startTime = 0; @@ -232,6 +238,7 @@ public class WritableRpcEngine implements RpcEngine { } /* close the IPC client that's responsible for this invoker's RPCs */ + @Override synchronized public void close() { if (!isClosed) { isClosed = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java index 775279dfb4b..5e2ed532556 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java @@ -88,6 +88,7 @@ public class LogLevel { public static class Servlet extends HttpServlet { private static final long serialVersionUID = 1L; + @Override public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java index 18dc7a0da73..a7177a6b2aa 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java @@ -35,7 +35,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; -import org.apache.hadoop.metrics.MetricsException; import org.apache.hadoop.metrics.spi.AbstractMetricsContext; import org.apache.hadoop.metrics.spi.OutputRecord; import org.apache.hadoop.metrics.spi.Util; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java index 0c9fb4e1522..b78d8a5b6fe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java @@ -30,7 +30,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; import org.apache.hadoop.metrics.MetricsContext; -import org.apache.hadoop.metrics.MetricsException; import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java index c3210e31cb5..4504db0ed0e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java @@ -21,7 +21,6 @@ package org.apache.hadoop.metrics.spi; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; -import org.apache.hadoop.metrics.MetricsException; /** * A null context which has a thread calling diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java index fa2c09217f0..d94c8ab46e1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java @@ -21,11 +21,7 @@ package org.apache.hadoop.metrics.spi; import java.util.Collections; -import java.util.Map; import java.util.Set; -import java.util.TreeMap; -import java.util.Map.Entry; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java index 166a846fdf9..32aa431991b 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java @@ -22,7 +22,6 @@ package org.apache.hadoop.metrics.spi; import java.net.InetSocketAddress; -import java.net.SocketAddress; import java.util.ArrayList; import java.util.List; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java index 5f89008e793..82d83b03b8e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java @@ -19,8 +19,6 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java index a427142af29..d3e5bd20fef 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java @@ -19,8 +19,6 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java index 049411d50a6..e74883f264a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java @@ -20,8 +20,6 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java index f77256d7a1b..a4e76714231 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java @@ -19,8 +19,6 @@ package org.apache.hadoop.metrics.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java index 5d8329ac402..67809090af0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java @@ -24,7 +24,6 @@ import java.net.URL; import java.net.URLClassLoader; import static java.security.AccessController.*; import java.security.PrivilegedAction; -import java.util.Arrays; import java.util.Iterator; import java.util.Locale; import java.util.Map; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java index 980351d6cb7..8d4ce183d22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java @@ -18,7 +18,6 @@ package org.apache.hadoop.metrics2.sink; -import java.io.BufferedOutputStream; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java index 852f9132d3a..f436a667f90 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java @@ -23,7 +23,6 @@ import java.lang.management.MemoryMXBean; import java.lang.management.MemoryUsage; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import static java.lang.Thread.State.*; import java.lang.management.GarbageCollectorMXBean; import java.util.Map; import java.util.List; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java index b1f0fb230f8..b2d803c95f5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java @@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java index 0a9ff7301bf..73bbd4a6314 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java @@ -30,7 +30,6 @@ import java.net.UnknownHostException; import java.util.Collections; import java.util.Enumeration; import java.util.LinkedHashSet; -import java.util.Set; import java.util.Vector; import javax.naming.NamingException; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java index da8fab2956c..c71693d4e59 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java @@ -621,6 +621,7 @@ public class NetworkTopology { } /** convert a network tree to a string */ + @Override public String toString() { // print the number of racks StringBuilder tree = new StringBuilder(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java index a41a42463c7..b8502d016b0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java @@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java index 62076b191b6..ed12b3c6be0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java @@ -33,7 +33,6 @@ import java.util.LinkedList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java index a0b0c3ed0f9..46039a5506e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java @@ -50,6 +50,7 @@ class SocketInputStream extends InputStream this.channel = channel; } + @Override int performIO(ByteBuffer buf) throws IOException { return channel.read(buf); } @@ -123,10 +124,12 @@ class SocketInputStream extends InputStream return ret; } + @Override public int read(byte[] b, int off, int len) throws IOException { return read(ByteBuffer.wrap(b, off, len)); } + @Override public synchronized void close() throws IOException { /* close the channel since Socket.getInputStream().close() * closes the socket. 
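
SocketInputStream also implements ReadableByteChannel, and the hunk just below annotates its isOpen()/read(ByteBuffer) methods accordingly. Generic NIO-side usage, as a sketch (the channel instance is assumed in scope):

    import java.nio.ByteBuffer;
    import java.nio.channels.ReadableByteChannel;

    ReadableByteChannel ch = socketInputStream;  // assumed in scope
    ByteBuffer buf = ByteBuffer.allocate(8192);
    int n = ch.read(buf);   // delegates to doIO(dst, SelectionKey.OP_READ)
    buf.flip();             // switch the buffer to draining mode
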
@@ -146,10 +149,12 @@ class SocketInputStream extends InputStream //ReadableByteChannel interface + @Override public boolean isOpen() { return reader.isOpen(); } + @Override public int read(ByteBuffer dst) throws IOException { return reader.doIO(dst, SelectionKey.OP_READ); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java index e7bfadeeda9..091c684059b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java @@ -58,6 +58,7 @@ public class SocketOutputStream extends OutputStream this.channel = channel; } + @Override int performIO(ByteBuffer buf) throws IOException { return channel.write(buf); } @@ -98,6 +99,7 @@ public class SocketOutputStream extends OutputStream this(socket.getChannel(), timeout); } + @Override public void write(int b) throws IOException { /* If we need to, we can optimize this allocation. * probably no need to optimize or encourage single byte writes. @@ -107,6 +109,7 @@ public class SocketOutputStream extends OutputStream write(buf, 0, 1); } + @Override public void write(byte[] b, int off, int len) throws IOException { ByteBuffer buf = ByteBuffer.wrap(b, off, len); while (buf.hasRemaining()) { @@ -126,6 +129,7 @@ public class SocketOutputStream extends OutputStream } } + @Override public synchronized void close() throws IOException { /* close the channel since Socket.getOutputStream().close() * closes the socket. @@ -145,10 +149,12 @@ public class SocketOutputStream extends OutputStream //WritableByteChannel interface + @Override public boolean isOpen() { return writer.isOpen(); } + @Override public int write(ByteBuffer src) throws IOException { return writer.doIO(src, SelectionKey.OP_WRITE); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java index 0192aa4cbc8..6b84f9d2cf3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java @@ -59,14 +59,12 @@ public class SocksSocketFactory extends SocketFactory implements this.proxy = proxy; } - /* @inheritDoc */ @Override public Socket createSocket() throws IOException { return new Socket(proxy); } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port) throws IOException { @@ -75,7 +73,6 @@ public class SocksSocketFactory extends SocketFactory implements return socket; } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port, InetAddress localHostAddr, int localPort) throws IOException { @@ -86,7 +83,6 @@ public class SocksSocketFactory extends SocketFactory implements return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port) throws IOException, UnknownHostException { @@ -96,7 +92,6 @@ public class SocksSocketFactory extends SocketFactory implements return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port, InetAddress localHostAddr, int localPort) throws IOException, @@ -108,13 +103,11 @@ public class SocksSocketFactory extends SocketFactory implements return socket; } - /*
@inheritDoc */ @Override public int hashCode() { return proxy.hashCode(); } - /* @inheritDoc */ @Override public boolean equals(Object obj) { if (this == obj) @@ -132,12 +125,12 @@ public class SocksSocketFactory extends SocketFactory implements return true; } - /* @inheritDoc */ + @Override public Configuration getConf() { return this.conf; } - /* @inheritDoc */ + @Override public void setConf(Configuration conf) { this.conf = conf; String proxyStr = conf.get("hadoop.socks.server"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java index f4942cef261..ac388193605 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java @@ -42,7 +42,6 @@ public class StandardSocketFactory extends SocketFactory { public StandardSocketFactory() { } - /* @inheritDoc */ @Override public Socket createSocket() throws IOException { /* @@ -63,7 +62,6 @@ public class StandardSocketFactory extends SocketFactory { return SocketChannel.open().socket(); } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port) throws IOException { @@ -72,7 +70,6 @@ public class StandardSocketFactory extends SocketFactory { return socket; } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port, InetAddress localHostAddr, int localPort) throws IOException { @@ -83,7 +80,6 @@ public class StandardSocketFactory extends SocketFactory { return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port) throws IOException, UnknownHostException { @@ -93,7 +89,6 @@ public class StandardSocketFactory extends SocketFactory { return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port, InetAddress localHostAddr, int localPort) throws IOException, @@ -105,7 +100,6 @@ public class StandardSocketFactory extends SocketFactory { return socket; } - /* @inheritDoc */ @Override public boolean equals(Object obj) { if (this == obj) @@ -115,7 +109,6 @@ public class StandardSocketFactory extends SocketFactory { return obj.getClass().equals(this.getClass()); } - /* @inheritDoc */ @Override public int hashCode() { return this.getClass().hashCode(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 277432bf14f..b245c80969a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -125,6 +125,7 @@ public class TableMapping extends CachedDNSToSwitchMapping { } } + @Override public synchronized List<String> resolve(List<String> names) { if (!initialized) { initialized = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java index d4c60e08d6a..5c302e55e3c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java @@ -41,9 +41,11 @@ public
class BinaryRecordInput implements RecordInput { private BinaryIndex(int nelems) { this.nelems = nelems; } + @Override public boolean done() { return (nelems <= 0); } + @Override public void incr() { nelems--; } @@ -56,6 +58,7 @@ public class BinaryRecordInput implements RecordInput { } private static ThreadLocal bIn = new ThreadLocal() { + @Override protected synchronized Object initialValue() { return new BinaryRecordInput(); } @@ -82,34 +85,42 @@ public class BinaryRecordInput implements RecordInput { this.in = din; } + @Override public byte readByte(final String tag) throws IOException { return in.readByte(); } + @Override public boolean readBool(final String tag) throws IOException { return in.readBoolean(); } + @Override public int readInt(final String tag) throws IOException { return Utils.readVInt(in); } + @Override public long readLong(final String tag) throws IOException { return Utils.readVLong(in); } + @Override public float readFloat(final String tag) throws IOException { return in.readFloat(); } + @Override public double readDouble(final String tag) throws IOException { return in.readDouble(); } + @Override public String readString(final String tag) throws IOException { return Utils.fromBinaryString(in); } + @Override public Buffer readBuffer(final String tag) throws IOException { final int len = Utils.readVInt(in); final byte[] barr = new byte[len]; @@ -117,26 +128,32 @@ public class BinaryRecordInput implements RecordInput { return new Buffer(barr); } + @Override public void startRecord(final String tag) throws IOException { // no-op } + @Override public void endRecord(final String tag) throws IOException { // no-op } + @Override public Index startVector(final String tag) throws IOException { return new BinaryIndex(readInt(tag)); } + @Override public void endVector(final String tag) throws IOException { // no-op } + @Override public Index startMap(final String tag) throws IOException { return new BinaryIndex(readInt(tag)); } + @Override public void endMap(final String tag) throws IOException { // no-op } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java index 699f635e680..aa6b8e95c53 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java @@ -45,6 +45,7 @@ public class BinaryRecordOutput implements RecordOutput { } private static ThreadLocal bOut = new ThreadLocal() { + @Override protected synchronized Object initialValue() { return new BinaryRecordOutput(); } @@ -72,34 +73,42 @@ public class BinaryRecordOutput implements RecordOutput { } + @Override public void writeByte(byte b, String tag) throws IOException { out.writeByte(b); } + @Override public void writeBool(boolean b, String tag) throws IOException { out.writeBoolean(b); } + @Override public void writeInt(int i, String tag) throws IOException { Utils.writeVInt(out, i); } + @Override public void writeLong(long l, String tag) throws IOException { Utils.writeVLong(out, l); } + @Override public void writeFloat(float f, String tag) throws IOException { out.writeFloat(f); } + @Override public void writeDouble(double d, String tag) throws IOException { out.writeDouble(d); } + @Override public void writeString(String s, String tag) throws IOException { Utils.toBinaryString(out, s); } + @Override public 
void writeBuffer(Buffer buf, String tag) throws IOException { byte[] barr = buf.get(); @@ -108,20 +117,26 @@ public class BinaryRecordOutput implements RecordOutput { out.write(barr, 0, len); } + @Override public void startRecord(Record r, String tag) throws IOException {} + @Override public void endRecord(Record r, String tag) throws IOException {} + @Override public void startVector(ArrayList v, String tag) throws IOException { writeInt(v.size(), tag); } + @Override public void endVector(ArrayList v, String tag) throws IOException {} + @Override public void startMap(TreeMap v, String tag) throws IOException { writeInt(v.size(), tag); } + @Override public void endMap(TreeMap v, String tag) throws IOException {} } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java index 5f6630852b9..eb569271d27 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java @@ -187,6 +187,7 @@ public class Buffer implements Comparable, Cloneable { } // inherit javadoc + @Override public int hashCode() { int hash = 1; byte[] b = this.get(); @@ -202,6 +203,7 @@ public class Buffer implements Comparable, Cloneable { * @return Positive if this is bigger than other, 0 if they are equal, and * negative if this is smaller than other. */ + @Override public int compareTo(Object other) { Buffer right = ((Buffer) other); byte[] lb = this.get(); @@ -217,6 +219,7 @@ public class Buffer implements Comparable, Cloneable { } // inherit javadoc + @Override public boolean equals(Object other) { if (other instanceof Buffer && this != other) { return compareTo(other) == 0; @@ -225,6 +228,7 @@ public class Buffer implements Comparable, Cloneable { } // inherit javadoc + @Override public String toString() { StringBuilder sb = new StringBuilder(2*count); for(int idx = 0; idx < count; idx++) { @@ -245,6 +249,7 @@ public class Buffer implements Comparable, Cloneable { } // inherit javadoc + @Override public Object clone() throws CloneNotSupportedException { Buffer result = (Buffer) super.clone(); result.copy(this.get(), 0, this.getCount()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java index 44093573c9e..e9fa0c35b89 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java @@ -38,6 +38,7 @@ public class CsvRecordInput implements RecordInput { private PushbackReader stream; private class CsvIndex implements Index { + @Override public boolean done() { char c = '\0'; try { @@ -47,6 +48,7 @@ public class CsvRecordInput implements RecordInput { } return (c == '}') ? true : false; } + @Override public void incr() {} } @@ -85,19 +87,23 @@ public class CsvRecordInput implements RecordInput { } } + @Override public byte readByte(String tag) throws IOException { return (byte) readLong(tag); } + @Override public boolean readBool(String tag) throws IOException { String sval = readField(tag); return "T".equals(sval) ?
true : false; } + @Override public int readInt(String tag) throws IOException { return (int) readLong(tag); } + @Override public long readLong(String tag) throws IOException { String sval = readField(tag); try { @@ -108,10 +114,12 @@ public class CsvRecordInput implements RecordInput { } } + @Override public float readFloat(String tag) throws IOException { return (float) readDouble(tag); } + @Override public double readDouble(String tag) throws IOException { String sval = readField(tag); try { @@ -122,16 +130,19 @@ public class CsvRecordInput implements RecordInput { } } + @Override public String readString(String tag) throws IOException { String sval = readField(tag); return Utils.fromCSVString(sval); } + @Override public Buffer readBuffer(String tag) throws IOException { String sval = readField(tag); return Utils.fromCSVBuffer(sval); } + @Override public void startRecord(String tag) throws IOException { if (tag != null && !"".equals(tag)) { char c1 = (char) stream.read(); @@ -142,6 +153,7 @@ public class CsvRecordInput implements RecordInput { } } + @Override public void endRecord(String tag) throws IOException { char c = (char) stream.read(); if (tag == null || "".equals(tag)) { @@ -163,6 +175,7 @@ public class CsvRecordInput implements RecordInput { return; } + @Override public Index startVector(String tag) throws IOException { char c1 = (char) stream.read(); char c2 = (char) stream.read(); @@ -172,6 +185,7 @@ public class CsvRecordInput implements RecordInput { return new CsvIndex(); } + @Override public void endVector(String tag) throws IOException { char c = (char) stream.read(); if (c != '}') { @@ -184,6 +198,7 @@ public class CsvRecordInput implements RecordInput { return; } + @Override public Index startMap(String tag) throws IOException { char c1 = (char) stream.read(); char c2 = (char) stream.read(); @@ -193,6 +208,7 @@ public class CsvRecordInput implements RecordInput { return new CsvIndex(); } + @Override public void endMap(String tag) throws IOException { char c = (char) stream.read(); if (c != '}') { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java index a638d0bcc9c..d770f47cf5c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java @@ -61,10 +61,12 @@ public class CsvRecordOutput implements RecordOutput { } } + @Override public void writeByte(byte b, String tag) throws IOException { writeLong((long)b, tag); } + @Override public void writeBool(boolean b, String tag) throws IOException { printCommaUnlessFirst(); String val = b ? 
"T" : "F"; @@ -72,32 +74,38 @@ public class CsvRecordOutput implements RecordOutput { throwExceptionOnError(tag); } + @Override public void writeInt(int i, String tag) throws IOException { writeLong((long)i, tag); } + @Override public void writeLong(long l, String tag) throws IOException { printCommaUnlessFirst(); stream.print(l); throwExceptionOnError(tag); } + @Override public void writeFloat(float f, String tag) throws IOException { writeDouble((double)f, tag); } + @Override public void writeDouble(double d, String tag) throws IOException { printCommaUnlessFirst(); stream.print(d); throwExceptionOnError(tag); } + @Override public void writeString(String s, String tag) throws IOException { printCommaUnlessFirst(); stream.print(Utils.toCSVString(s)); throwExceptionOnError(tag); } + @Override public void writeBuffer(Buffer buf, String tag) throws IOException { printCommaUnlessFirst(); @@ -105,6 +113,7 @@ public class CsvRecordOutput implements RecordOutput { throwExceptionOnError(tag); } + @Override public void startRecord(Record r, String tag) throws IOException { if (tag != null && !"".equals(tag)) { printCommaUnlessFirst(); @@ -113,6 +122,7 @@ public class CsvRecordOutput implements RecordOutput { } } + @Override public void endRecord(Record r, String tag) throws IOException { if (tag == null || "".equals(tag)) { stream.print("\n"); @@ -123,23 +133,27 @@ public class CsvRecordOutput implements RecordOutput { } } + @Override public void startVector(ArrayList v, String tag) throws IOException { printCommaUnlessFirst(); stream.print("v{"); isFirst = true; } + @Override public void endVector(ArrayList v, String tag) throws IOException { stream.print("}"); isFirst = false; } + @Override public void startMap(TreeMap v, String tag) throws IOException { printCommaUnlessFirst(); stream.print("m{"); isFirst = true; } + @Override public void endMap(TreeMap v, String tag) throws IOException { stream.print("}"); isFirst = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java index df0b6c2c852..f0ec99ad814 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java @@ -54,6 +54,7 @@ public abstract class Record implements WritableComparable, Cloneable { throws IOException; // inheric javadoc + @Override public abstract int compareTo (final Object peer) throws ClassCastException; /** @@ -73,18 +74,21 @@ public abstract class Record implements WritableComparable, Cloneable { } // inherit javadoc + @Override public void write(final DataOutput out) throws java.io.IOException { BinaryRecordOutput bout = BinaryRecordOutput.get(out); this.serialize(bout); } // inherit javadoc + @Override public void readFields(final DataInput din) throws java.io.IOException { BinaryRecordInput rin = BinaryRecordInput.get(din); this.deserialize(rin); } // inherit javadoc + @Override public String toString() { try { ByteArrayOutputStream s = new ByteArrayOutputStream(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java index 2c86804c14e..805d93160a8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java @@ -40,6 +40,7 @@ public abstract class RecordComparator extends WritableComparator { } // inherit JavaDoc + @Override public abstract int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2); /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java index 5e77a4552b4..871e04bff0b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java @@ -63,10 +63,13 @@ public class XmlRecordInput implements RecordInput { valList = vlist; } + @Override public void startDocument() throws SAXException {} + @Override public void endDocument() throws SAXException {} + @Override public void startElement(String ns, String sname, String qname, @@ -88,6 +91,7 @@ public class XmlRecordInput implements RecordInput { } } + @Override public void endElement(String ns, String sname, String qname) throws SAXException { @@ -98,6 +102,7 @@ public class XmlRecordInput implements RecordInput { } } + @Override public void characters(char buf[], int offset, int len) throws SAXException { if (charsValid) { @@ -109,6 +114,7 @@ public class XmlRecordInput implements RecordInput { } private class XmlIndex implements Index { + @Override public boolean done() { Value v = valList.get(vIdx); if ("/array".equals(v.getType())) { @@ -119,6 +125,7 @@ public class XmlRecordInput implements RecordInput { return false; } } + @Override public void incr() {} } @@ -152,6 +159,7 @@ public class XmlRecordInput implements RecordInput { } } + @Override public byte readByte(String tag) throws IOException { Value v = next(); if (!"ex:i1".equals(v.getType())) { @@ -160,6 +168,7 @@ public class XmlRecordInput implements RecordInput { return Byte.parseByte(v.getValue()); } + @Override public boolean readBool(String tag) throws IOException { Value v = next(); if (!"boolean".equals(v.getType())) { @@ -168,6 +177,7 @@ public class XmlRecordInput implements RecordInput { return "1".equals(v.getValue()); } + @Override public int readInt(String tag) throws IOException { Value v = next(); if (!"i4".equals(v.getType()) && @@ -177,6 +187,7 @@ public class XmlRecordInput implements RecordInput { return Integer.parseInt(v.getValue()); } + @Override public long readLong(String tag) throws IOException { Value v = next(); if (!"ex:i8".equals(v.getType())) { @@ -185,6 +196,7 @@ public class XmlRecordInput implements RecordInput { return Long.parseLong(v.getValue()); } + @Override public float readFloat(String tag) throws IOException { Value v = next(); if (!"ex:float".equals(v.getType())) { @@ -193,6 +205,7 @@ public class XmlRecordInput implements RecordInput { return Float.parseFloat(v.getValue()); } + @Override public double readDouble(String tag) throws IOException { Value v = next(); if (!"double".equals(v.getType())) { @@ -201,6 +214,7 @@ public class XmlRecordInput implements RecordInput { return Double.parseDouble(v.getValue()); } + @Override public String readString(String tag) throws IOException { Value v = next(); if (!"string".equals(v.getType())) { @@ -209,6 +223,7 @@ public class XmlRecordInput implements RecordInput { return Utils.fromXMLString(v.getValue()); } + @Override public Buffer readBuffer(String tag) throws IOException { Value v = next();
if (!"string".equals(v.getType())) { @@ -217,6 +232,7 @@ public class XmlRecordInput implements RecordInput { return Utils.fromXMLBuffer(v.getValue()); } + @Override public void startRecord(String tag) throws IOException { Value v = next(); if (!"struct".equals(v.getType())) { @@ -224,6 +240,7 @@ public class XmlRecordInput implements RecordInput { } } + @Override public void endRecord(String tag) throws IOException { Value v = next(); if (!"/struct".equals(v.getType())) { @@ -231,6 +248,7 @@ public class XmlRecordInput implements RecordInput { } } + @Override public Index startVector(String tag) throws IOException { Value v = next(); if (!"array".equals(v.getType())) { @@ -239,12 +257,15 @@ public class XmlRecordInput implements RecordInput { return new XmlIndex(); } + @Override public void endVector(String tag) throws IOException {} + @Override public Index startMap(String tag) throws IOException { return startVector(tag); } + @Override public void endMap(String tag) throws IOException { endVector(tag); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java index adf28a2fdf7..9cf85537ed4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java @@ -149,6 +149,7 @@ public class XmlRecordOutput implements RecordOutput { } } + @Override public void writeByte(byte b, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -157,6 +158,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeBool(boolean b, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -165,6 +167,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeInt(int i, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -173,6 +176,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeLong(long l, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -181,6 +185,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeFloat(float f, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -189,6 +194,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeDouble(double d, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -197,6 +203,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeString(String s, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -205,6 +212,7 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void writeBuffer(Buffer buf, String tag) throws IOException { printBeginEnvelope(tag); @@ -214,12 +222,14 @@ public class XmlRecordOutput implements RecordOutput { printEndEnvelope(tag); } + @Override public void startRecord(Record r, String tag) throws IOException { insideRecord(tag); stream.print("\n"); addIndent(); } + @Override public void endRecord(Record r, String tag) throws IOException { closeIndent(); putIndent(); @@ -227,12 +237,14 
@@ public class XmlRecordOutput implements RecordOutput { outsideRecord(tag); } + @Override public void startVector(ArrayList v, String tag) throws IOException { insideVector(tag); stream.print("<array>\n"); addIndent(); } + @Override public void endVector(ArrayList v, String tag) throws IOException { closeIndent(); putIndent(); @@ -240,12 +252,14 @@ public class XmlRecordOutput implements RecordOutput { outsideVector(tag); } + @Override public void startMap(TreeMap v, String tag) throws IOException { insideMap(tag); stream.print("<array>\n"); addIndent(); } + @Override public void endMap(TreeMap v, String tag) throws IOException { closeIndent(); putIndent(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java index b62b62924bf..69ab37a1522 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java @@ -37,6 +37,7 @@ class CGenerator extends CodeGenerator { * and spits-out file-level elements (such as include statements etc.) * record-level code is generated by JRecord. */ + @Override void genCode(String name, ArrayList ilist, ArrayList rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java index a18871c74a2..ec4d5df981d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java @@ -98,6 +98,7 @@ public class CodeBuffer { sb.append(ch); } + @Override public String toString() { return sb.toString(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java index 7f3e8d74dfe..2a186fa2833 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java @@ -18,12 +18,8 @@ package org.apache.hadoop.record.compiler; -import java.io.IOException; -import java.util.Iterator; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.record.RecordInput; /** * const definitions for Record I/O compiler diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java index e1fb599c049..1c97a48d537 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java @@ -37,6 +37,7 @@ class CppGenerator extends CodeGenerator { * and spits-out file-level elements (such as include statements etc.) * record-level code is generated by JRecord.
*/ + @Override void genCode(String name, ArrayList ilist, ArrayList rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java index 77791f27231..95dc7334f7e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java @@ -36,20 +36,24 @@ public class JBoolean extends JType { super("boolean", "Bool", "Boolean", "TypeID.RIOType.BOOL"); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 : ("+ fname+"?1:-1);\n"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.BoolTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = ("+fname+")?0:1;\n"); } // In Binary format, boolean is written as byte. true = 1, false = 0 + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<1) {\n"); @@ -61,6 +65,7 @@ public class JBoolean extends JType { } // In Binary format, boolean is written as byte. true = 1, false = 0 + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<1 || l2<1) {\n"); @@ -81,6 +86,7 @@ public class JBoolean extends JType { super("bool"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BOOL)"; } @@ -93,6 +99,7 @@ public class JBoolean extends JType { setCType(new CType()); } + @Override String getSignature() { return "z"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java index 53b8264e3e6..7c4a6706070 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java @@ -39,22 +39,27 @@ public class JBuffer extends JCompType { "org.apache.hadoop.record.Buffer", "TypeID.RIOType.BUFFER"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.BufferTypeID"; } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n"); } + @Override void genEquals(CodeBuffer cb, String fname, String peer) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n"); } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+ @@ -64,6 +69,7 @@ public class JBuffer extends JCompType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -84,6 +90,7 @@ public class JBuffer extends JCompType { super(" ::std::string"); } + @Override void genGetSet(CodeBuffer cb, String fname) { cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n"); 
cb.append("return "+fname+";\n"); @@ -93,6 +100,7 @@ public class JBuffer extends JCompType { cb.append("}\n"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BUFFER)"; } @@ -105,6 +113,7 @@ public class JBuffer extends JCompType { setCType(new CCompType()); } + @Override String getSignature() { return "B"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java index 96f191d5fb7..1ac7171f241 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java @@ -37,10 +37,12 @@ public class JByte extends JType { super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.ByteTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<1) {\n"); @@ -51,6 +53,7 @@ public class JByte extends JType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<1 || l2<1) {\n"); @@ -71,6 +74,7 @@ public class JByte extends JType { super("int8_t"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)"; } @@ -82,6 +86,7 @@ public class JByte extends JType { setCType(new CType()); } + @Override String getSignature() { return "b"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java index f71d97d50c7..5f41aeb56fc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java @@ -35,18 +35,22 @@ abstract class JCompType extends JType { super(type, suffix, wrapper, typeIDByteString); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n"); } + @Override void genEquals(CodeBuffer cb, String fname, String peer) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n"); } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n"); } + @Override void genClone(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+ fname+".clone();\n"); @@ -59,6 +63,7 @@ abstract class JCompType extends JType { super(type); } + @Override void genGetSet(CodeBuffer cb, String fname) { cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n"); cb.append("return "+fname+";\n"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java index cdab26341d4..5e1a65fc071 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java @@ -36,15 +36,18 @@ public class JDouble extends JType { super("double", "Double", 
"Double", "TypeID.RIOType.DOUBLE"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { String tmp = "Double.doubleToLongBits("+fname+")"; cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<8) {\n"); @@ -55,6 +58,7 @@ public class JDouble extends JType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<8 || l2<8) {\n"); @@ -77,6 +81,7 @@ public class JDouble extends JType { super("double"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)"; } @@ -90,6 +95,7 @@ public class JDouble extends JType { setCType(new CType()); } + @Override String getSignature() { return "d"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java index 1081651be65..10aa69ad2f1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java @@ -35,14 +35,17 @@ public class JFloat extends JType { super("float", "Float", "Float", "TypeID.RIOType.FLOAT"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.FloatTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<4) {\n"); @@ -53,6 +56,7 @@ public class JFloat extends JType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<4 || l2<4) {\n"); @@ -75,6 +79,7 @@ public class JFloat extends JType { super("float"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)"; } @@ -87,6 +92,7 @@ public class JFloat extends JType { setCType(new CType()); } + @Override String getSignature() { return "f"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java index b1303e44bdd..d18445ff55c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java @@ -38,10 +38,12 @@ public class JInt extends JType { super("int", "Int", "Integer", "TypeID.RIOType.INT"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.IntTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n"); @@ -50,6 +52,7 @@ public class JInt extends JType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -70,6 +73,7 @@ public class JInt extends JType { super("int32_t"); } + @Override String getTypeIDObjectString() { return "new 
::hadoop::TypeID(::hadoop::RIOTYPE_INT)"; } @@ -82,6 +86,7 @@ public class JInt extends JType { setCType(new CType()); } + @Override String getSignature() { return "i"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java index ca09f053b46..f540fc808d8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java @@ -37,15 +37,18 @@ public class JLong extends JType { super("long", "Long", "Long", "TypeID.RIOType.LONG"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.LongTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+ fname+">>>32));\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("long i = org.apache.hadoop.record.Utils.readVLong("+b+", "+s+");\n"); @@ -54,6 +57,7 @@ public class JLong extends JType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);\n"); @@ -74,6 +78,7 @@ public class JLong extends JType { super("int64_t"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)"; } @@ -86,6 +91,7 @@ public class JLong extends JType { setCType(new CType()); } + @Override String getSignature() { return "l"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java index 4758accb51d..03dcad322ea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java @@ -59,17 +59,20 @@ public class JMap extends JCompType { this.value = value; } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.MapTypeID(" + key.getTypeIDObjectString() + ", " + value.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { key.genSetRTIFilter(cb, nestedStructMap); value.genSetRTIFilter(cb, nestedStructMap); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { String setType = "java.util.Set<"+key.getWrapperType()+"> "; String iterType = "java.util.Iterator<"+key.getWrapperType()+"> "; @@ -98,6 +101,7 @@ public class JMap extends JCompType { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(getType()+" "+fname+";\n"); } @@ -122,6 +126,7 @@ public class JMap extends JCompType { cb.append("}\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { String setType = "java.util.Set<java.util.Map.Entry<"+ key.getWrapperType()+","+value.getWrapperType()+">> "; @@ -153,6 +158,7 @@ public class JMap extends JCompType { decrLevel(); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); incrLevel(); @@ -170,6 +176,7 @@ public class JMap extends JCompType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); incrLevel(); @@ -208,12 +215,14 @@ public class JMap extends JCompType { this.value = value;
} + @Override String getTypeIDObjectString() { return "new ::hadoop::MapTypeID(" + key.getTypeIDObjectString() + ", " + value.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb) { key.genSetRTIFilter(cb); value.genSetRTIFilter(cb); @@ -230,6 +239,7 @@ public class JMap extends JCompType { valueType = t2; } + @Override String getSignature() { return "{" + keyType.getSignature() + valueType.getSignature() +"}"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java index 647d3a7baa4..80e545ba3ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java @@ -54,11 +54,13 @@ public class JRecord extends JCompType { } } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.StructTypeID(" + fullName + ".getTypeInfo())"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { // ignore, if we've already set the type filter for this record if (!nestedStructMap.containsKey(fullName)) { @@ -129,6 +131,7 @@ public class JRecord extends JCompType { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(fullName+" "+fname+";\n"); @@ -137,10 +140,12 @@ public class JRecord extends JCompType { cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int r = "+fullName+ @@ -149,6 +154,7 @@ public class JRecord extends JCompType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int r1 = "+fullName+ @@ -492,6 +498,7 @@ public class JRecord extends JCompType { } } + @Override String getTypeIDObjectString() { return "new ::hadoop::StructTypeID(" + fullName + "::getTypeInfo().getFieldTypeInfos())"; @@ -501,6 +508,7 @@ public class JRecord extends JCompType { return " "+name+" "+fname+";\n"; } + @Override void genSetRTIFilter(CodeBuffer cb) { // we set the RTI filter here cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+ @@ -797,6 +805,7 @@ public class JRecord extends JCompType { signature = sb.toString(); } + @Override String getSignature() { return signature; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java index 5c712e963c5..cd3ab3dc354 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java @@ -36,10 +36,12 @@ public class JString extends JCompType { super("String", "String", "String", "TypeID.RIOType.STRING"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.StringTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -48,6 +50,7 @@ public class JString extends JCompType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -61,6 +64,7 @@ public class JString extends JCompType { cb.append("}\n"); } + @Override void genClone(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n"); } @@ -72,6 +76,7 @@ public class JString extends JCompType { super("::std::string"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)"; } @@ -84,6 +89,7 @@ public class JString extends JCompType { setCType(new CCompType()); } + @Override String getSignature() { return "s"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java index 8bce5cc91fb..46ecbada51b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java @@ -54,15 +54,18 @@ public class JVector extends JCompType { element = t; } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.VectorTypeID(" + element.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { element.genSetRTIFilter(cb, nestedStructMap); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append("{\n"); incrLevel(); @@ -92,6 +95,7 @@ public class JVector extends JCompType { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(getType()+" "+fname+";\n"); @@ -113,6 +117,7 @@ public class JVector extends JCompType { cb.append("}\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { cb.append("{\n"); incrLevel(); @@ -131,6 +136,7 @@ public class JVector extends JCompType { decrLevel(); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); incrLevel(); @@ -146,6 +152,7 @@ public class JVector extends JCompType { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); incrLevel(); @@ -179,11 +186,13 @@ public class JVector extends JCompType { element = t; } + @Override String getTypeIDObjectString() { return "new ::hadoop::VectorTypeID(" + element.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb) { element.genSetRTIFilter(cb); } @@ -198,6 +207,7 @@ public class JVector extends JCompType { setCType(new CCompType()); } + @Override String getSignature() { return "[" + type.getSignature() + "]"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java index 04c4bd84733..6d51df6cd15 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java @@ -39,6 +39,7 @@ class JavaGenerator extends CodeGenerator { * @param rlist List of records defined within this file * @param destDir output directory */ + @Override void genCode(String name, ArrayList ilist, ArrayList 
rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java index 5f999ecb882..869e0594f79 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java @@ -110,6 +110,7 @@ public class RccTask extends Task { /** * Invoke the Hadoop record compiler on each record definition file */ + @Override public void execute() throws BuildException { if (src == null && filesets.size()==0) { throw new BuildException("There must be a file attribute or a fileset child element"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java index 3d4a82bac61..3af5910ccb4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java @@ -120,6 +120,7 @@ public class ParseException extends Exception { * of the final stack trace, and hence the correct error message * gets displayed. */ + @Override public String getMessage() { if (!specialConstructor) { return super.getMessage(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java index fcac0997d6e..c4c74cd6516 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java @@ -24,7 +24,6 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.record.compiler.*; import java.util.ArrayList; import java.util.Hashtable; -import java.util.Iterator; import java.io.File; import java.io.FileReader; import java.io.FileNotFoundException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java index 72acd13f743..7488606fe96 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java @@ -20,14 +20,6 @@ package org.apache.hadoop.record.compiler.generated; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.record.compiler.*; -import java.util.ArrayList; -import java.util.Hashtable; -import java.util.Iterator; -import java.io.File; -import java.io.FileReader; -import java.io.FileNotFoundException; -import java.io.IOException; /** * @deprecated Replaced by Avro. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java index 37df5b97e05..1396bf899b5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java @@ -78,6 +78,7 @@ public class Token { /** * Returns the image. */ + @Override public String toString() { return image; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java index 4b0712e82fa..b6da7dadcd8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java @@ -138,6 +138,7 @@ public class TokenMgrError extends Error * * from this method for such cases in the release version of your parser. */ + @Override public String getMessage() { return super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java index f7f4fb0d02e..32436abf829 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java @@ -69,6 +69,7 @@ public class FieldTypeInfo /** * Two FieldTypeInfos are equal if each of their fields matches */ + @Override public boolean equals(Object o) { if (this == o) return true; @@ -87,6 +88,7 @@ public class FieldTypeInfo * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java index 3a83d0896c4..f9c5320cfbc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java @@ -19,8 +19,6 @@ package org.apache.hadoop.record.meta; import java.io.IOException; -import java.util.*; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.record.RecordOutput; @@ -58,6 +56,7 @@ public class MapTypeID extends TypeID { return this.typeIDValue; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); typeIDKey.write(rout, tag); @@ -68,6 +67,7 @@ * Two map typeIDs are equal if their constituent elements have the * same type */ + @Override public boolean equals(Object o) { if (!super.equals(o)) return false; @@ -82,6 +82,7 @@ * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ +
@Override public int hashCode() { return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java index 3bd153cdc31..8a9d0b5fbbe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java @@ -122,6 +122,7 @@ public class RecordTypeInfo extends org.apache.hadoop.record.Record /** * Serialize the type information for a record */ + @Override public void serialize(RecordOutput rout, String tag) throws IOException { // write out any header, version info, here rout.startRecord(this, tag); @@ -133,6 +134,7 @@ public class RecordTypeInfo extends org.apache.hadoop.record.Record /** * Deserialize the type information for a record */ + @Override public void deserialize(RecordInput rin, String tag) throws IOException { // read in any header, version info rin.startRecord(tag); @@ -148,6 +150,7 @@ public class RecordTypeInfo extends org.apache.hadoop.record.Record * So we always throw an exception. * Not implemented. Always returns 0 if another RecordTypeInfo is passed in. */ + @Override public int compareTo (final Object peer_) throws ClassCastException { if (!(peer_ instanceof RecordTypeInfo)) { throw new ClassCastException("Comparing different types of records."); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java index b7d19ea8157..d2c9ccdc753 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java @@ -72,6 +72,7 @@ public class StructTypeID extends TypeID { return null; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); writeRest(rout, tag); @@ -155,9 +156,11 @@ public class StructTypeID extends TypeID { } } + @Override public boolean equals(Object o) { return super.equals(o); } + @Override public int hashCode() { return super.hashCode(); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java index ea2e35eb792..5a76eb4bd1d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java @@ -89,6 +89,7 @@ public class TypeID { /** * Two base typeIDs are equal if they refer to the same type */ + @Override public boolean equals(Object o) { if (this == o) return true; @@ -107,6 +108,7 @@ public class TypeID { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { // See 'Effective Java' by Joshua Bloch return 37*17+(int)typeVal; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java index 88f820b8b84..22ab07efdc2 100644 ---
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java @@ -43,6 +43,7 @@ public class VectorTypeID extends TypeID { return this.typeIDElement; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); typeIDElement.write(rout, tag); @@ -52,6 +53,7 @@ public class VectorTypeID extends TypeID { * Two vector typeIDs are equal if their constituent elements have the * same type */ + @Override public boolean equals(Object o) { if (!super.equals (o)) return false; @@ -64,6 +66,7 @@ public class VectorTypeID extends TypeID { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { return 37*17+typeIDElement.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java index 0e1c0864f54..b72e3ed6df7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java @@ -22,7 +22,6 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.VersionedProtocol; import org.apache.hadoop.security.KerberosInfo; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java index fa82664bdd2..7ee452316a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java @@ -189,6 +189,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. */ + @Override public int read() throws IOException { if (!useWrap) { return inStream.read(); @@ -220,6 +221,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. */ + @Override public int read(byte[] b) throws IOException { return read(b, 0, b.length); } @@ -242,6 +244,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. */ + @Override public int read(byte[] b, int off, int len) throws IOException { if (!useWrap) { return inStream.read(b, off, len); @@ -286,6 +289,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. */ + @Override public long skip(long n) throws IOException { if (!useWrap) { return inStream.skip(n); @@ -312,6 +316,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. 
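The record-meta hunks above (FieldTypeInfo, MapTypeID, TypeID, VectorTypeID) all annotate the same equals()/hashCode() pair. A self-contained sketch of that idiom, assuming a hypothetical PairTypeID class that is not part of this patch; the constants follow the 37*17 shape the comments attribute to "Effective Java":

    // Hypothetical class, for illustration only.
    public class PairTypeID {
      private final Object keyType;
      private final Object valueType;

      public PairTypeID(Object keyType, Object valueType) {
        this.keyType = keyType;
        this.valueType = valueType;
      }

      @Override
      public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof PairTypeID)) return false;
        PairTypeID other = (PairTypeID) o;
        return keyType.equals(other.keyType)
            && valueType.equals(other.valueType);
      }

      @Override
      public int hashCode() {
        // Same basic shape as MapTypeID.hashCode() above.
        return 37*17 + keyType.hashCode() + 37*17 + valueType.hashCode();
      }
    }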
*/ + @Override public int available() throws IOException { if (!useWrap) { return inStream.available(); @@ -329,6 +334,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @exception IOException * if an I/O error occurs. */ + @Override public void close() throws IOException { disposeSasl(); ostart = 0; @@ -344,6 +350,7 @@ public class SaslInputStream extends InputStream implements ReadableByteChannel * @return false, since this class does not support the * mark and reset methods. */ + @Override public boolean markSupported() { return false; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java index 4a0f3cb42cd..494ba1e7a4e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java @@ -19,9 +19,7 @@ package org.apache.hadoop.security; import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -122,6 +120,7 @@ public class SaslOutputStream extends OutputStream { * @exception IOException * if an I/O error occurs. */ + @Override public void write(int b) throws IOException { if (!useWrap) { outStream.write(b); @@ -146,6 +145,7 @@ public class SaslOutputStream extends OutputStream { * @exception IOException * if an I/O error occurs. */ + @Override public void write(byte[] b) throws IOException { write(b, 0, b.length); } @@ -163,6 +163,7 @@ public class SaslOutputStream extends OutputStream { * @exception IOException * if an I/O error occurs. */ + @Override public void write(byte[] inBuf, int off, int len) throws IOException { if (!useWrap) { outStream.write(inBuf, off, len); @@ -197,6 +198,7 @@ public class SaslOutputStream extends OutputStream { * @exception IOException * if an I/O error occurs. */ + @Override public void flush() throws IOException { outStream.flush(); } @@ -208,6 +210,7 @@ public class SaslOutputStream extends OutputStream { * @exception IOException * if an I/O error occurs. 
*/ + @Override public void close() throws IOException { disposeSasl(); outStream.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index 54b1502acc2..98b3f5db295 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -239,6 +239,7 @@ public class SaslRpcClient { this.userPassword = SaslRpcServer.encodePassword(token.getPassword()); } + @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { NameCallback nc = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index b0588c27fd3..31718628f22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -194,7 +194,6 @@ public class SaslRpcServer { return encodePassword(secretManager.retrievePassword(tokenid)); } - /** {@inheritDoc} */ @Override public void handle(Callback[] callbacks) throws InvalidToken, UnsupportedCallbackException { @@ -253,7 +252,6 @@ public class SaslRpcServer { @InterfaceStability.Evolving public static class SaslGssCallbackHandler implements CallbackHandler { - /** {@inheritDoc} */ @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index 2f65892db70..25bae83b1e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -498,6 +498,7 @@ public class SecurityUtil { * Uses standard java host resolution */ static class StandardHostResolver implements HostResolver { + @Override public InetAddress getByName(String host) throws UnknownHostException { return InetAddress.getByName(host); } @@ -542,6 +543,7 @@ public class SecurityUtil { * @return InetAddress with the fully qualified hostname or ip * @throws UnknownHostException if host does not exist */ + @Override public InetAddress getByName(String host) throws UnknownHostException { InetAddress addr = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java index 1b14927bd77..6335fc71469 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java @@ -20,10 +20,7 @@ package org.apache.hadoop.security; import java.io.IOException; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.StringTokenizer; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; 
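The SaslInputStream and SaslOutputStream hunks above annotate every overridden stream method. The shape they share: when negotiation produced no quality-of-protection layer (useWrap is false), each call delegates straight to the underlying stream. A simplified sketch of that delegation, not the patch's implementation (the real streams buffer data and exchange SaslClient.wrap()/unwrap() frames):

    import java.io.IOException;
    import java.io.OutputStream;

    // Hypothetical class, for illustration only.
    public class PassthroughOutputStream extends OutputStream {
      private final OutputStream outStream;
      private final boolean useWrap; // false => no SASL integrity/privacy layer

      public PassthroughOutputStream(OutputStream outStream, boolean useWrap) {
        this.outStream = outStream;
        this.useWrap = useWrap;
      }

      @Override
      public void write(int b) throws IOException {
        // Plain delegation; a real SASL stream would buffer here when
        // useWrap is true and wrap the buffer on flush().
        outStream.write(b);
      }

      @Override
      public void flush() throws IOException { outStream.flush(); }

      @Override
      public void close() throws IOException { outStream.close(); }
    }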
import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java index 7e60bed26a8..0ee1c60c593 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java @@ -20,12 +20,6 @@ package org.apache.hadoop.security; import java.io.IOException; import java.util.LinkedList; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.HashSet; -import java.util.StringTokenizer; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java index 0d3c4822892..184b40d8ed5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java @@ -19,7 +19,6 @@ package org.apache.hadoop.security; import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; -import java.io.File; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.security.AccessControlContext; @@ -33,7 +32,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -344,6 +342,7 @@ public class UserGroupInformation { this.realUser = realUser; } + @Override public String getName() { return realUser.getUserName(); } @@ -700,6 +699,7 @@ public class UserGroupInformation { !isKeytab) { Thread t = new Thread(new Runnable() { + @Override public void run() { String cmd = conf.get("hadoop.kerberos.kinit.command", "kinit"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java index 922d3308421..e23612ec0f6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java @@ -48,6 +48,7 @@ public class AccessControlList implements Writable { WritableFactories.setFactory (AccessControlList.class, new WritableFactory() { + @Override public Writable newInstance() { return new AccessControlList(); } }); } @@ -318,6 +319,7 @@ public class AccessControlList implements Writable { /** * Serializes the AccessControlList object */ + @Override public void write(DataOutput out) throws IOException { String aclString = getAclString(); Text.writeString(out, aclString); @@ -326,6 +328,7 @@ public class AccessControlList implements Writable { /** * Deserializes the AccessControlList object */ + @Override public void readFields(DataInput in) throws IOException { 
String aclString = Text.readString(in); buildACL(aclString); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java index c2176e59890..6b86a05e7af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java @@ -42,6 +42,7 @@ public abstract class PolicyProvider { */ public static final PolicyProvider DEFAULT_POLICY_PROVIDER = new PolicyProvider() { + @Override public Service[] getServices() { return null; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java index 9ab6d68dafe..4407a7e8e32 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java @@ -22,7 +22,6 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.VersionedProtocol; import org.apache.hadoop.security.KerberosInfo; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index 00dd2021eec..4c17f9fd25d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -124,6 +124,7 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory { * @throws GeneralSecurityException thrown if the keystores could not be * initialized due to a security error. 
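AccessControlList above, like Token, DelegationKey, Filter and Key later in this patch, overrides the two methods of org.apache.hadoop.io.Writable. A minimal self-contained Writable for illustration (LabelWritable is a hypothetical name); the contract is that readFields() consumes exactly the bytes write() produced:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    // Hypothetical class, for illustration only.
    public class LabelWritable implements Writable {
      private String label = "";

      @Override
      public void write(DataOutput out) throws IOException {
        // Length-prefixed UTF-8, as in AccessControlList.write() above.
        Text.writeString(out, label);
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        // Read back exactly what write() emitted.
        label = Text.readString(in);
      }
    }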
*/ + @Override public void init(SSLFactory.Mode mode) throws IOException, GeneralSecurityException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java index 3f88fb89a7d..c59000ea6a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java @@ -81,6 +81,7 @@ import javax.net.ssl.SSLSocket; @InterfaceStability.Evolving public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { + @Override boolean verify(String host, SSLSession session); void check(String host, SSLSocket ssl) throws IOException; @@ -125,12 +126,14 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { */ public final static SSLHostnameVerifier DEFAULT = new AbstractVerifier() { + @Override public final void check(final String[] hosts, final String[] cns, final String[] subjectAlts) throws SSLException { check(hosts, cns, subjectAlts, false, false); } + @Override public final String toString() { return "DEFAULT"; } }; @@ -143,6 +146,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { */ public final static SSLHostnameVerifier DEFAULT_AND_LOCALHOST = new AbstractVerifier() { + @Override public final void check(final String[] hosts, final String[] cns, final String[] subjectAlts) throws SSLException { @@ -152,6 +156,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { check(hosts, cns, subjectAlts, false, false); } + @Override public final String toString() { return "DEFAULT_AND_LOCALHOST"; } }; @@ -173,12 +178,14 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { */ public final static SSLHostnameVerifier STRICT = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) throws SSLException { check(host, cns, subjectAlts, false, true); } + @Override public final String toString() { return "STRICT"; } }; @@ -190,12 +197,14 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { */ public final static SSLHostnameVerifier STRICT_IE6 = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) throws SSLException { check(host, cns, subjectAlts, true, true); } + @Override public final String toString() { return "STRICT_IE6"; } }; @@ -205,11 +214,13 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { */ public final static SSLHostnameVerifier ALLOW_ALL = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) { // Allow everything - so never blowup. } + @Override public final String toString() { return "ALLOW_ALL"; } }; @@ -250,6 +261,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { * @param session SSLSession with the remote server * @return true if the host matched the one in the certificate. 
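The SSLHostnameVerifier hunks above annotate methods of anonymous subclasses: each policy (DEFAULT, DEFAULT_AND_LOCALHOST, STRICT, STRICT_IE6, ALLOW_ALL) is a public static final instance of an anonymous AbstractVerifier, with toString() overridden to name the policy. The same pattern sketched against a hypothetical Greeter interface, not part of this patch:

    // Hypothetical interface, for illustration only.
    public interface Greeter {
      String greet(String name);

      // Interface fields are implicitly public static final.
      Greeter POLITE = new Greeter() {
        @Override
        public String greet(String name) { return "Hello, " + name; }

        @Override
        public String toString() { return "POLITE"; }
      };

      Greeter TERSE = new Greeter() {
        @Override
        public String greet(String name) { return name; }

        @Override
        public String toString() { return "TERSE"; }
      };
    }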
*/ + @Override public boolean verify(String host, SSLSession session) { try { Certificate[] certs = session.getPeerCertificates(); @@ -262,20 +274,24 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { } } + @Override public void check(String host, SSLSocket ssl) throws IOException { check(new String[]{host}, ssl); } + @Override public void check(String host, X509Certificate cert) throws SSLException { check(new String[]{host}, cert); } + @Override public void check(String host, String[] cns, String[] subjectAlts) throws SSLException { check(new String[]{host}, cns, subjectAlts); } + @Override public void check(String host[], SSLSocket ssl) throws IOException { if (host == null) { @@ -332,6 +348,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { check(host, x509); } + @Override public void check(String[] host, X509Certificate cert) throws SSLException { String[] cns = Certificates.getCNs(cert); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index bbddf6fdc78..905c948da75 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -195,7 +195,7 @@ public class Token implements Writable { service = newService; } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) throws IOException { int len = WritableUtils.readVInt(in); if (identifier == null || identifier.length != len) { @@ -211,7 +211,7 @@ public class Token implements Writable { service.readFields(in); } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, identifier.length); out.write(identifier); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java index b3e367bdf25..6ec3b7e606b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java @@ -85,6 +85,7 @@ extends TokenIdentifier { * * @return the username or owner */ + @Override public UserGroupInformation getUser() { if ( (owner == null) || ("".equals(owner.toString()))) { return null; @@ -150,7 +151,7 @@ extends TokenIdentifier { return a == null ? 
b == null : a.equals(b); } - /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { if (obj == this) { return true; @@ -168,11 +169,12 @@ extends TokenIdentifier { return false; } - /** {@inheritDoc} */ + @Override public int hashCode() { return this.sequenceNumber; } + @Override public void readFields(DataInput in) throws IOException { byte version = in.readByte(); if (version != VERSION) { @@ -200,6 +202,7 @@ extends TokenIdentifier { WritableUtils.writeVInt(out, masterKeyId); } + @Override public void write(DataOutput out) throws IOException { if (owner.getLength() > Text.DEFAULT_MAX_LEN) { throw new IOException("owner is too long to be serialized!"); @@ -213,6 +216,7 @@ extends TokenIdentifier { writeImpl(out); } + @Override public String toString() { StringBuilder buffer = new StringBuilder(); buffer diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index 97530d10d02..29367a38ab3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -404,6 +404,7 @@ extends AbstractDelegationTokenIdentifier> private long lastMasterKeyUpdate; private long lastTokenCacheCleanup; + @Override public void run() { LOG.info("Starting expired delegation token remover thread, " + "tokenRemoverScanInterval=" + tokenRemoverScanInterval diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java index 3b5705eb6df..3458b2df829 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java @@ -91,6 +91,7 @@ public class DelegationKey implements Writable { /** */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, keyId); WritableUtils.writeVLong(out, expiryDate); @@ -104,6 +105,7 @@ public class DelegationKey implements Writable { /** */ + @Override public void readFields(DataInput in) throws IOException { keyId = WritableUtils.readVInt(in); expiryDate = WritableUtils.readVLong(in); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java index 0f5bf7a5130..c0c107933f5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java @@ -21,7 +21,6 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.ipc.VersionedProtocol; /** * Protocol implemented by the Name Node and Job Tracker which maps users to diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index 4711ed2f56d..a1e20d242d6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -71,6 +71,7 @@ public class AsyncDiskService { public AsyncDiskService(String[] volumes) throws IOException { threadFactory = new ThreadFactory() { + @Override public Thread newThread(Runnable r) { return new Thread(threadGroup, r); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 4813847e845..7d321e8a297 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -240,19 +240,23 @@ public class DataChecksum implements Checksum { return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int } //Checksum Interface. Just a wrapper around member summer. + @Override public long getValue() { return summer.getValue(); } + @Override public void reset() { summer.reset(); inSum = 0; } + @Override public void update( byte[] b, int off, int len ) { if ( len > 0 ) { summer.update( b, off, len ); inSum += len; } } + @Override public void update( int b ) { summer.update( b ); inSum += 1; @@ -444,9 +448,13 @@ public class DataChecksum implements Checksum { public ChecksumNull() {} //Dummy interface + @Override public long getValue() { return 0; } + @Override public void reset() {} + @Override public void update(byte[] b, int off, int len) {} + @Override public void update(int b) {} }; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java index b9d2fc17ca1..5a0fb27fe45 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java @@ -48,13 +48,12 @@ public final class HeapSort implements IndexedSorter { * Sort the given range of items using heap sort. 
* {@inheritDoc} */ + @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } - /** - * {@inheritDoc} - */ + @Override public void sort(final IndexedSortable s, final int p, final int r, final Progressable rep) { final int N = r - p; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java index 536b6f27ab2..90643577479 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java @@ -214,6 +214,7 @@ public class Progress { this.status = status; } + @Override public String toString() { StringBuilder result = new StringBuilder(); toString(result); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java index a7a2d37c84d..3dd30fe6b00 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java @@ -46,17 +46,17 @@ public class PureJavaCrc32 implements Checksum { reset(); } - /** {@inheritDoc} */ + @Override public long getValue() { return (~crc) & 0xffffffffL; } - /** {@inheritDoc} */ + @Override public void reset() { crc = 0xffffffff; } - /** {@inheritDoc} */ + @Override public void update(byte[] b, int off, int len) { int localCrc = crc; while(len > 7) { @@ -81,7 +81,7 @@ public class PureJavaCrc32 implements Checksum { crc = localCrc; } - /** {@inheritDoc} */ + @Override final public void update(int b) { crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff]; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java index 3d52eae077f..7fdfe1489f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java @@ -42,18 +42,18 @@ public class PureJavaCrc32C implements Checksum { reset(); } - /** {@inheritDoc} */ + @Override public long getValue() { long ret = crc; return (~ret) & 0xffffffffL; } - /** {@inheritDoc} */ + @Override public void reset() { crc = 0xffffffff; } - /** {@inheritDoc} */ + @Override public void update(byte[] b, int off, int len) { int localCrc = crc; while(len > 7) { @@ -78,7 +78,7 @@ public class PureJavaCrc32C implements Checksum { crc = localCrc; } - /** {@inheritDoc} */ + @Override final public void update(int b) { crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff]; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java index 5686f82d056..73d8d90d425 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java @@ -52,13 +52,12 @@ public final class QuickSort implements IndexedSorter { * {@inheritDoc} If the recursion depth falls below {@link #getMaxDepth}, * then switch to {@link HeapSort}. 
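PureJavaCrc32 and PureJavaCrc32C above implement java.util.zip.Checksum, so the {@inheritDoc} comments on getValue(), reset() and the two update() overloads give way to @Override. A tiny Checksum implementation for illustration; the XOR accumulator is made up and is not a CRC:

    import java.util.zip.Checksum;

    // Hypothetical class, for illustration only.
    public class XorChecksum implements Checksum {
      private int acc;

      @Override
      public long getValue() { return acc & 0xffffffffL; }

      @Override
      public void reset() { acc = 0; }

      @Override
      public void update(byte[] b, int off, int len) {
        for (int i = off; i < off + len; i++) {
          update(b[i]);
        }
      }

      @Override
      public void update(int b) { acc ^= (b & 0xff); }
    }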
*/ + @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } - /** - * {@inheritDoc} - */ + @Override public void sort(final IndexedSortable s, int p, int r, final Progressable rep) { sortInternal(s, p, r, rep, getMaxDepth(r - p)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java index 4520cb264aa..bf12de633fb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java @@ -257,6 +257,7 @@ public class ReflectionUtils { */ private static ThreadLocal cloneBuffers = new ThreadLocal() { + @Override protected synchronized CopyInCopyOutBuffer initialValue() { return new CopyInCopyOutBuffer(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java index d563c1d7d58..b8c16f214d5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java @@ -30,7 +30,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; /** * A base class for running a Unix command. @@ -323,10 +322,12 @@ abstract public class Shell { this.run(); } + @Override public String[] getExecString() { return command; } + @Override protected void parseExecResult(BufferedReader lines) throws IOException { output = new StringBuffer(); char[] buf = new char[512]; @@ -348,6 +349,7 @@ abstract public class Shell { * * @return a string representation of the object. 
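ReflectionUtils above overrides ThreadLocal.initialValue() in an anonymous subclass so that every thread lazily gets its own buffer. The same idiom, sketched with a hypothetical per-thread StringBuilder:

    // Hypothetical class, for illustration only.
    public class BufferHolder {
      private static final ThreadLocal<StringBuilder> BUFFERS =
          new ThreadLocal<StringBuilder>() {
            @Override
            protected StringBuilder initialValue() {
              return new StringBuilder(512); // one scratch buffer per thread
            }
          };

      public static StringBuilder get() {
        return BUFFERS.get(); // created via initialValue() on first use
      }
    }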
*/ + @Override public String toString() { StringBuilder builder = new StringBuilder(); String[] args = getExecString(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java index e39463fc849..f183a4c53cb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java @@ -193,6 +193,7 @@ public abstract class Filter implements Writable { // Writable interface + @Override public void write(DataOutput out) throws IOException { out.writeInt(VERSION); out.writeInt(this.nbHash); @@ -200,6 +201,7 @@ public abstract class Filter implements Writable { out.writeInt(this.vectorSize); } + @Override public void readFields(DataInput in) throws IOException { int ver = in.readInt(); if (ver > 0) { // old unversioned format diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java index 1ff5b826008..7ac134c76ca 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java @@ -154,12 +154,14 @@ public class Key implements WritableComparable { // Writable + @Override public void write(DataOutput out) throws IOException { out.writeInt(bytes.length); out.write(bytes); out.writeDouble(weight); } + @Override public void readFields(DataInput in) throws IOException { this.bytes = new byte[in.readInt()]; in.readFully(this.bytes); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java index 8e867c4cbb7..bf4891378c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java @@ -81,6 +81,7 @@ public class JenkinsHash extends Hash { *
Use for hash table lookup, or anything where one collision in 2^^32 is * acceptable. Do NOT use for cryptographic purposes. */ + @Override @SuppressWarnings("fallthrough") public int hash(byte[] key, int nbytes, int initval) { int length = nbytes; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java index 8e79f1aa5a2..6ed3dfd3df1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java @@ -37,6 +37,7 @@ public class MurmurHash extends Hash { return _instance; } + @Override public int hash(byte[] data, int length, int seed) { int m = 0x5bd1e995; int r = 24; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java index 202b2429cbc..50cb3a53c52 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java @@ -24,5 +24,6 @@ public interface CLICommand { public CommandExecutor getExecutor(String tag) throws IllegalArgumentException; public CLICommandTypes getType(); public String getCmd(); + @Override public String toString(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java index 55e99b51a68..602a07f3d58 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java @@ -31,6 +31,7 @@ public class CLITestCmd implements CLICommand { this.type = type; } + @Override public CommandExecutor getExecutor(String tag) throws IllegalArgumentException { if (getType() instanceof CLICommandFS) return new FSCmdExecutor(tag, new FsShell()); @@ -38,12 +39,17 @@ public class CLITestCmd implements CLICommand { IllegalArgumentException("Unknown type of test command: " + getType()); } + @Override public CLICommandTypes getType() { return type; } + + @Override public String getCmd() { return cmd; } + + @Override public String toString() { return cmd; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java index 86e86b6e1f2..98237ac7263 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java @@ -29,6 +29,7 @@ public class FSCmdExecutor extends CommandExecutor { this.shell = shell; } + @Override protected void execute(final String cmd) throws Exception{ String[] args = getCommandAsArgs(cmd, "NAMENODE", this.namenode); ToolRunner.run(shell, args); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java index 1928de44a4c..1c22ee68c77 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java @@ -24,7 +24,6 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.mortbay.util.ajax.JSON; -import org.mortbay.util.ajax.JSON.Output; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java index ec58f20177a..333cf2c8bf3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java @@ -39,8 +39,6 @@ import java.util.regex.Pattern; import junit.framework.TestCase; import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertNotNull; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.Path; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java index df346dd657b..014844e28bc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java @@ -19,8 +19,6 @@ package org.apache.hadoop.conf; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java index b8f820c024d..3036d0c8394 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java @@ -18,9 +18,6 @@ package org.apache.hadoop.conf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.io.ByteArrayOutputStream; import java.util.Map; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java index 2cfb56a416b..f4367523cbe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java @@ -99,17 +99,11 @@ public class TestReconfiguration { super(conf); } - /** - * {@inheritDoc} - */ @Override public Collection getReconfigurableProperties() { return Arrays.asList(PROP1, PROP2, PROP4); } - /** - * {@inheritDoc} - */ @Override public synchronized void reconfigurePropertyImpl(String property, String newVal) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java index 6c501009012..e9677badc38 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java @@ -67,6 +67,7 @@ public abstract class FSMainOperationsBaseTest { protected static FileSystem fSys; final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(final Path file) { return true; } @@ -74,6 +75,7 @@ public abstract class FSMainOperationsBaseTest { //A test filter which returns any path containing an "x" or "X" final private static PathFilter TEST_X_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { if(file.getName().contains("x") || file.getName().contains("X")) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java index 150b68e35d7..bf60e02cd67 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java @@ -67,6 +67,7 @@ public abstract class FileContextMainOperationsBaseTest { protected static FileContext fc; final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(final Path file) { return true; } @@ -74,6 +75,7 @@ public abstract class FileContextMainOperationsBaseTest { //A test filter which returns any path containing an "x" or "X" final private static PathFilter TEST_X_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { if(file.getName().contains("x") || file.getName().contains("X")) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java index 39ae24659b9..b80764cebf0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java @@ -176,6 +176,7 @@ public abstract class FileContextPermissionBase { .createRemoteUser("otherUser"); FileContext newFc = otherUser.doAs(new PrivilegedExceptionAction() { + @Override public FileContext run() throws Exception { FileContext newFc = FileContext.getFileContext(); return newFc; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java index 5786a6653cf..0acd416dd89 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java @@ -20,8 +20,6 @@ package org.apache.hadoop.fs; import java.io.*; import java.util.ArrayList; -import java.util.Iterator; - import junit.framework.Assert; import org.apache.hadoop.fs.permission.FsPermission; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
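The test-base hunks above declare their PathFilter constants as anonymous classes, with accept() the single method to annotate. A self-contained sketch (Filters is a hypothetical holder class, not part of this patch):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    // Hypothetical class, for illustration only.
    public class Filters {
      // Accepts every path, like DEFAULT_FILTER above.
      public static final PathFilter ACCEPT_ALL = new PathFilter() {
        @Override
        public boolean accept(Path file) {
          return true;
        }
      };

      // Accepts only names containing an 'x' or 'X', like TEST_X_FILTER.
      public static final PathFilter X_ONLY = new PathFilter() {
        @Override
        public boolean accept(Path file) {
          String name = file.getName();
          return name.contains("x") || name.contains("X");
        }
      };
    }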
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java index 5d2c595f5cb..3e5970d228a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java @@ -19,7 +19,6 @@ package org.apache.hadoop.fs; import java.io.BufferedWriter; -import java.io.IOException; import java.io.OutputStreamWriter; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index ffb1dcf1f1d..de3d5566eb5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -29,11 +29,13 @@ public class TestDU extends TestCase { final static private File DU_DIR = new File( System.getProperty("test.build.data","/tmp"), "dutmp"); + @Override public void setUp() { FileUtil.fullyDelete(DU_DIR); assertTrue(DU_DIR.mkdirs()); } + @Override public void tearDown() throws IOException { FileUtil.fullyDelete(DU_DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java index 128c1fb088b..e0ee5f03f27 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java @@ -28,6 +28,7 @@ import org.junit.Test; public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest { + @Override @Before public void setUp() throws Exception { fSys = FileSystem.getLocal(new Configuration()); @@ -35,12 +36,14 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTes } static Path wd = null; + @Override protected Path getDefaultWorkingDirectory() throws IOException { if (wd == null) wd = FileSystem.getLocal(new Configuration()).getWorkingDirectory(); return wd; } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java index 335f403fe78..439ce2c15ca 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java @@ -26,12 +26,14 @@ import org.junit.Before; public class TestFcLocalFsPermission extends FileContextPermissionBase { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java index bc1126f2315..29b64638067 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java @@ -25,6 +25,7 @@ import org.junit.Before; public class TestFcLocalFsUtil extends FileContextUtilBase { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java index 7e5f99f5fb3..8dff124d7e4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java @@ -110,6 +110,7 @@ public class TestFileSystemCaching { public static class InitializeForeverFileSystem extends LocalFileSystem { final static Semaphore sem = new Semaphore(0); + @Override public void initialize(URI uri, Configuration conf) throws IOException { // notify that InitializeForeverFileSystem started initialization sem.release(); @@ -127,6 +128,7 @@ public class TestFileSystemCaching { public void testCacheEnabledWithInitializeForeverFS() throws Exception { final Configuration conf = new Configuration(); Thread t = new Thread() { + @Override public void run() { conf.set("fs.localfs1.impl", "org.apache.hadoop.fs." + "TestFileSystemCaching$InitializeForeverFileSystem"); @@ -167,11 +169,13 @@ public class TestFileSystemCaching { UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo"); UserGroupInformation ugiB = UserGroupInformation.createRemoteUser("bar"); FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } }); FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -180,6 +184,7 @@ public class TestFileSystemCaching { assertSame(fsA, fsA1); FileSystem fsB = ugiB.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -192,6 +197,7 @@ public class TestFileSystemCaching { UserGroupInformation ugiA2 = UserGroupInformation.createRemoteUser("foo"); fsA = ugiA2.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -203,6 +209,7 @@ public class TestFileSystemCaching { ugiA.addToken(t1); fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -245,12 +252,14 @@ public class TestFileSystemCaching { conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", null).getName()); UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo"); FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } }); //Now we should get the cached filesystem FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -261,6 +270,7 @@ public class TestFileSystemCaching { //Now we should get a different (newly created) filesystem fsA1 = ugiA.doAs(new 
PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java index c66b4fa9017..574ed704da2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java @@ -19,8 +19,6 @@ package org.apache.hadoop.fs; import static org.junit.Assert.*; -import java.io.IOException; - import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.util.DataChecksum; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index 6f3c270232d..aae013fd775 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -18,7 +18,6 @@ package org.apache.hadoop.fs; import java.io.IOException; -import java.util.Iterator; import java.util.HashSet; import java.util.Random; import java.util.Set; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java index e3402abee97..f5decbb2b0c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java @@ -23,6 +23,7 @@ import org.junit.Before; public class TestLocalFSFileContextCreateMkdir extends FileContextCreateMkdirBaseTest { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java index 901b6c96ea3..d1c272cc859 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java @@ -27,6 +27,7 @@ import org.junit.Test; public class TestLocalFSFileContextMainOperations extends FileContextMainOperationsBaseTest { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); @@ -34,6 +35,7 @@ public class TestLocalFSFileContextMainOperations extends FileContextMainOperati } static Path wd = null; + @Override protected Path getDefaultWorkingDirectory() throws IOException { if (wd == null) wd = FileSystem.getLocal(new Configuration()).getWorkingDirectory(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java index 89684fe720a..64d0525a188 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java +++ 
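TestFileSystemCaching above wraps each FileSystem.get() call in UserGroupInformation.doAs() because the FileSystem cache keys on the calling UGI: the same URI fetched as two different users must yield two different instances. A minimal sketch of the pattern (the file:/// URI is chosen only for illustration):

    import java.net.URI;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    // Hypothetical class, for illustration only.
    public class DoAsDemo {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("foo");
        FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            // Executes with "foo" as the current user, so the cache
            // entry is keyed to that UGI.
            return FileSystem.get(new URI("file:///"), conf);
          }
        });
        System.out.println(fs.getUri());
      }
    }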
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java @@ -35,18 +35,22 @@ import org.junit.Before; */ public class TestLocalFSFileContextSymlink extends FileContextSymlinkBaseTest { + @Override protected String getScheme() { return "file"; } + @Override protected String testBaseDir1() throws IOException { return getAbsoluteTestRootDir(fc)+"/test1"; } + @Override protected String testBaseDir2() throws IOException { return getAbsoluteTestRootDir(fc)+"/test2"; } + @Override protected URI testURI() { try { return new URI("file:///"); @@ -55,6 +59,7 @@ public class TestLocalFSFileContextSymlink extends FileContextSymlinkBaseTest { } } + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java index 35c23cb0f3d..45e9bfb79c7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java @@ -47,15 +47,18 @@ public class TestLocalFsFCStatistics extends FCStatisticsBaseTest { fc.delete(getTestRootPath(fc, "test"), true); } + @Override protected void verifyReadBytes(Statistics stats) { Assert.assertEquals(blockSize, stats.getBytesRead()); } + @Override protected void verifyWrittenBytes(Statistics stats) { //Extra 12 bytes are written apart from the block. Assert.assertEquals(blockSize + 12, stats.getBytesWritten()); } + @Override protected URI getFsUri() { return URI.create(LOCAL_FS_ROOT_URI); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java index 512567a8d57..6c417cdb7c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java @@ -24,6 +24,7 @@ import org.junit.Before; public class TestLocal_S3FileContextURI extends FileContextURIBase { + @Override @Before public void setUp() throws Exception { Configuration S3Conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java index c6324f8dc9f..22fa5b0629f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java @@ -24,6 +24,7 @@ import org.junit.Before; public class TestS3_LocalFileContextURI extends FileContextURIBase { + @Override @Before public void setUp() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index 8bfa7185b02..70bd62fa000 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -67,6 +67,7 @@ public class 
TestTrash extends TestCase { // filter that matches all the files that start with fileName* PathFilter pf = new PathFilter() { + @Override public boolean accept(Path file) { return file.getName().startsWith(prefix); } @@ -563,6 +564,7 @@ public class TestTrash extends TestCase { super(); this.home = home; } + @Override public Path getHomeDirectory() { return home; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java index b8b6957266a..baf25ded690 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java @@ -39,16 +39,20 @@ public class KFSEmulationImpl implements IFSImpl { localFS = FileSystem.getLocal(conf); } + @Override public boolean exists(String path) throws IOException { return localFS.exists(new Path(path)); } + @Override public boolean isDirectory(String path) throws IOException { return localFS.isDirectory(new Path(path)); } + @Override public boolean isFile(String path) throws IOException { return localFS.isFile(new Path(path)); } + @Override public String[] readdir(String path) throws IOException { FileStatus[] p = localFS.listStatus(new Path(path)); try { @@ -64,10 +68,12 @@ public class KFSEmulationImpl implements IFSImpl { return entries; } + @Override public FileStatus[] readdirplus(Path path) throws IOException { return localFS.listStatus(path); } + @Override public int mkdirs(String path) throws IOException { if (localFS.mkdirs(new Path(path))) return 0; @@ -75,12 +81,14 @@ public class KFSEmulationImpl implements IFSImpl { return -1; } + @Override public int rename(String source, String dest) throws IOException { if (localFS.rename(new Path(source), new Path(dest))) return 0; return -1; } + @Override public int rmdir(String path) throws IOException { if (isDirectory(path)) { // the directory better be empty @@ -91,21 +99,26 @@ public class KFSEmulationImpl implements IFSImpl { return -1; } + @Override public int remove(String path) throws IOException { if (isFile(path) && (localFS.delete(new Path(path), true))) return 0; return -1; } + @Override public long filesize(String path) throws IOException { return localFS.getFileStatus(new Path(path)).getLen(); } + @Override public short getReplication(String path) throws IOException { return 1; } + @Override public short setReplication(String path, short replication) throws IOException { return 1; } + @Override public String[][] getDataLocation(String path, long start, long len) throws IOException { BlockLocation[] blkLocations = localFS.getFileBlockLocations(localFS.getFileStatus(new Path(path)), @@ -123,6 +136,7 @@ public class KFSEmulationImpl implements IFSImpl { return hints; } + @Override public long getModificationTime(String path) throws IOException { FileStatus s = localFS.getFileStatus(new Path(path)); if (s == null) @@ -131,18 +145,21 @@ public class KFSEmulationImpl implements IFSImpl { return s.getModificationTime(); } + @Override public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException { // besides path/overwrite, the other args don't matter for // testing purposes. 
return localFS.append(new Path(path)); } + @Override public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException { // besides path/overwrite, the other args don't matter for // testing purposes. return localFS.create(new Path(path)); } + @Override public FSDataInputStream open(String path, int bufferSize) throws IOException { return localFS.open(new Path(path)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java index 3ff998f9967..c1c676e9b0b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java @@ -18,21 +18,17 @@ package org.apache.hadoop.fs.kfs; -import java.io.*; -import java.net.*; +import java.io.IOException; +import java.net.URI; import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.kfs.KosmosFileSystem; - public class TestKosmosFileSystem extends TestCase { KosmosFileSystem kosmosFileSystem; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java index 5124211d344..3222cf43bbc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java @@ -67,6 +67,7 @@ public class DataGenerator extends Configured implements Tool { * namespace. Afterwards it reads the file attributes and creates files * in the file. All file content is filled with 'a'. */ + @Override public int run(String[] args) throws Exception { int exitCode = 0; exitCode = init(args); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index ea192c48494..7490be80af8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -186,6 +186,7 @@ public class LoadGenerator extends Configured implements Tool { /** Main loop * Each iteration decides what's the next operation and then pauses. */ + @Override public void run() { try { while (shouldRun) { @@ -281,6 +282,7 @@ public class LoadGenerator extends Configured implements Tool { * Before exiting, it prints the average execution for * each operation and operation throughput. 
*/ + @Override public int run(String[] args) throws Exception { int exitCode = init(args); if (exitCode != 0) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java index 689e01dbf33..71649a59412 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java @@ -214,6 +214,7 @@ public class StructureGenerator { } /** Output a file attribute */ + @Override protected void outputFiles(PrintStream out, String prefix) { prefix = (prefix == null)?super.name: prefix + "/"+super.name; out.println(prefix + " " + numOfBlocks); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java index 84d142e0897..8024c6acc71 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java @@ -47,34 +47,42 @@ class InMemoryFileSystemStore implements FileSystemStore { private SortedMap inodes = new TreeMap(); private Map blocks = new HashMap(); + @Override public void initialize(URI uri, Configuration conf) { this.conf = conf; } + @Override public String getVersion() throws IOException { return "0"; } + @Override public void deleteINode(Path path) throws IOException { inodes.remove(normalize(path)); } + @Override public void deleteBlock(Block block) throws IOException { blocks.remove(block.getId()); } + @Override public boolean inodeExists(Path path) throws IOException { return inodes.containsKey(normalize(path)); } + @Override public boolean blockExists(long blockId) throws IOException { return blocks.containsKey(blockId); } + @Override public INode retrieveINode(Path path) throws IOException { return inodes.get(normalize(path)); } + @Override public File retrieveBlock(Block block, long byteRangeStart) throws IOException { byte[] data = blocks.get(block.getId()); File file = createTempFile(); @@ -100,6 +108,7 @@ class InMemoryFileSystemStore implements FileSystemStore { return result; } + @Override public Set listSubPaths(Path path) throws IOException { Path normalizedPath = normalize(path); // This is inefficient but more than adequate for testing purposes. 
@@ -112,6 +121,7 @@ class InMemoryFileSystemStore implements FileSystemStore { return subPaths; } + @Override public Set listDeepSubPaths(Path path) throws IOException { Path normalizedPath = normalize(path); String pathString = normalizedPath.toUri().getPath(); @@ -128,10 +138,12 @@ class InMemoryFileSystemStore implements FileSystemStore { return subPaths; } + @Override public void storeINode(Path path, INode inode) throws IOException { inodes.put(normalize(path), inode); } + @Override public void storeBlock(Block block, File file) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[8192]; @@ -157,11 +169,13 @@ class InMemoryFileSystemStore implements FileSystemStore { return new Path(path.toUri().getPath()); } + @Override public void purge() throws IOException { inodes.clear(); blocks.clear(); } + @Override public void dump() throws IOException { StringBuilder sb = new StringBuilder(getClass().getSimpleName()); sb.append(", \n"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java index bc8ccc0f681..abac70ac1bb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java @@ -55,15 +55,18 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { new TreeMap(); private SortedMap dataMap = new TreeMap(); + @Override public void initialize(URI uri, Configuration conf) throws IOException { this.conf = conf; } + @Override public void storeEmptyFile(String key) throws IOException { metadataMap.put(key, new FileMetadata(key, 0, Time.now())); dataMap.put(key, new byte[0]); } + @Override public void storeFile(String key, File file, byte[] md5Hash) throws IOException { @@ -86,10 +89,12 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { dataMap.put(key, out.toByteArray()); } + @Override public InputStream retrieve(String key) throws IOException { return retrieve(key, 0); } + @Override public InputStream retrieve(String key, long byteRangeStart) throws IOException { @@ -118,15 +123,18 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { return result; } + @Override public FileMetadata retrieveMetadata(String key) throws IOException { return metadataMap.get(key); } + @Override public PartialListing list(String prefix, int maxListingLength) throws IOException { return list(prefix, maxListingLength, null, false); } + @Override public PartialListing list(String prefix, int maxListingLength, String priorLastKey, boolean recursive) throws IOException { @@ -165,16 +173,19 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { commonPrefixes.toArray(new String[0])); } + @Override public void delete(String key) throws IOException { metadataMap.remove(key); dataMap.remove(key); } + @Override public void copy(String srcKey, String dstKey) throws IOException { metadataMap.put(dstKey, metadataMap.get(srcKey)); dataMap.put(dstKey, dataMap.get(srcKey)); } + @Override public void purge(String prefix) throws IOException { Iterator> i = metadataMap.entrySet().iterator(); @@ -187,6 +198,7 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { } } + @Override public void dump() throws IOException { 
System.out.println(metadataMap.values()); System.out.println(dataMap.keySet()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java index 44d7a4a7c13..e990b92465f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java @@ -347,6 +347,7 @@ public class TestChRootedFileSystem { MockFileSystem() { super(mock(FileSystem.class)); } + @Override public void initialize(URI name, Configuration conf) throws IOException {} } } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java index 2f8d8ce8486..de4b1e87ac5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java @@ -33,6 +33,7 @@ import org.junit.Test; public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest { static FileSystem fcTarget; + @Override @Before public void setUp() throws Exception { Configuration conf = new Configuration(); @@ -42,6 +43,7 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTes super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java index 39e3515d03f..16b38b72ec1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java @@ -28,12 +28,14 @@ public class TestFcCreateMkdirLocalFs extends FileContextCreateMkdirBaseTest { + @Override @Before public void setUp() throws Exception { fc = ViewFsTestSetup.setupForViewFsLocalFs(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java index 235a182616e..5641c9d70bf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java @@ -36,6 +36,7 @@ public class TestFcMainOperationsLocalFs extends FileContext fclocal; Path targetOfTests; + @Override @Before public void setUp() throws Exception { /** @@ -79,6 +80,7 @@ public class TestFcMainOperationsLocalFs extends super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java index 3e92eb9cc12..0e44be9be85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java @@ -27,12 +27,14 @@ import org.junit.Before; public class TestFcPermissionsLocalFs extends FileContextPermissionBase { + @Override @Before public void setUp() throws Exception { fc = ViewFsTestSetup.setupForViewFsLocalFs(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java index e3f6e404a16..735dfcf3cf6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java @@ -160,6 +160,7 @@ public class TestViewFileSystemDelegationTokenSupport { static class FakeFileSystem extends RawLocalFileSystem { URI uri; + @Override public void initialize(URI name, Configuration conf) throws IOException { this.uri = name; } @@ -169,6 +170,7 @@ public class TestViewFileSystemDelegationTokenSupport { return new Path("/"); // ctor calls getUri before the uri is inited... } + @Override public URI getUri() { return uri; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java index 8d4c38e1e6f..4b45fc8c5b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java @@ -39,6 +39,7 @@ import org.junit.Before; public class TestViewFileSystemLocalFileSystem extends ViewFileSystemBaseTest { + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -47,6 +48,7 @@ public class TestViewFileSystemLocalFileSystem extends ViewFileSystemBaseTest { } + @Override @After public void tearDown() throws Exception { fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java index 3ba3e002e08..4786cd5fdf5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java @@ -42,6 +42,7 @@ import org.junit.Test; public class TestViewFileSystemWithAuthorityLocalFileSystem extends ViewFileSystemBaseTest { URI schemeWithAuthority; + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -55,12 +56,14 @@ public 
class TestViewFileSystemWithAuthorityLocalFileSystem extends ViewFileSyst fsView = FileSystem.get(schemeWithAuthority, conf); } + @Override @After public void tearDown() throws Exception { fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true); super.tearDown(); } + @Override @Test public void testBasicPaths() { Assert.assertEquals(schemeWithAuthority, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java index 2a4488ce765..99bcf5d32b7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java @@ -26,6 +26,7 @@ import org.junit.Before; public class TestViewFsLocalFs extends ViewFsBaseTest { + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -34,6 +35,7 @@ public class TestViewFsLocalFs extends ViewFsBaseTest { } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java index 81270c2320d..4325f403462 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java @@ -30,7 +30,6 @@ import org.apache.hadoop.fs.TestTrash; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.mortbay.log.Log; public class TestViewFsTrash { FileSystem fsTarget; // the target file system - the mount will point here @@ -46,6 +45,7 @@ public class TestViewFsTrash { super(); this.home = home; } + @Override public Path getHomeDirectory() { return home; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java index 217d3fcd94c..2e498f2c0a0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java @@ -41,6 +41,7 @@ import org.junit.Test; public class TestViewFsWithAuthorityLocalFs extends ViewFsBaseTest { URI schemeWithAuthority; + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -54,11 +55,13 @@ public class TestViewFsWithAuthorityLocalFs extends ViewFsBaseTest { fcView = FileContext.getFileContext(schemeWithAuthority, conf); } + @Override @After public void tearDown() throws Exception { super.tearDown(); } + @Override @Test public void testBasicPaths() { Assert.assertEquals(schemeWithAuthority, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java index 4a576d08ebf..9eec749336f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.net.URISyntaxException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java index 80612d9b783..9c68b282f63 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java @@ -25,7 +25,6 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.net.Socket; import java.util.ArrayList; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -82,6 +81,7 @@ public abstract class ClientBaseWithFixes extends ZKTestCase { * */ protected class NullWatcher implements Watcher { + @Override public void process(WatchedEvent event) { /* nada */ } } @@ -97,6 +97,7 @@ public abstract class ClientBaseWithFixes extends ZKTestCase { clientConnected = new CountDownLatch(1); connected = false; } + @Override synchronized public void process(WatchedEvent event) { if (event.getState() == KeeperState.SyncConnected || event.getState() == KeeperState.ConnectedReadOnly) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index c38bc534245..0985af18c68 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -184,6 +184,7 @@ class DummyHAService extends HAServiceTarget { } public static class DummyFencer implements FenceMethod { + @Override public void checkArgs(String args) throws BadFencingConfigurationException { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index d9b10ae091c..eef6d7de41f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -46,14 +46,17 @@ public class TestGlobalFilter extends HttpServerFunctionalTest { static public class RecordingFilter implements Filter { private FilterConfig filterConfig = null; + @Override public void init(FilterConfig filterConfig) { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -69,6 +72,7 @@ public class TestGlobalFilter extends HttpServerFunctionalTest { static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addGlobalFilter("recording", RecordingFilter.class.getName(), null); } diff 
--git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index 73aebea486f..3bd77f039c8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -46,14 +46,17 @@ public class TestPathFilter extends HttpServerFunctionalTest { static public class RecordingFilter implements Filter { private FilterConfig filterConfig = null; + @Override public void init(FilterConfig filterConfig) { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -69,6 +72,7 @@ public class TestPathFilter extends HttpServerFunctionalTest { static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("recording", RecordingFilter.class.getName(), null); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index 6cd21beb1b7..3d8f0305f67 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -45,14 +45,17 @@ public class TestServletFilter extends HttpServerFunctionalTest { static public class SimpleFilter implements Filter { private FilterConfig filterConfig = null; - public void init(FilterConfig filterConfig) { + @Override + public void init(FilterConfig filterConfig) throws ServletException { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -67,6 +70,7 @@ public class TestServletFilter extends HttpServerFunctionalTest { static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("simple", SimpleFilter.class.getName(), null); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java index e3e885ad12b..74e9cc86bd3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java @@ -18,12 +18,10 @@ package org.apache.hadoop.io; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.lang.reflect.Type; import org.apache.avro.Schema; -import org.apache.avro.io.BinaryEncoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.reflect.ReflectData; import org.apache.avro.reflect.ReflectDatumWriter; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java index e97f2068be4..8f99aab482b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java @@ -40,11 +40,13 @@ public class RandomDatum implements WritableComparable { return length; } + @Override public void write(DataOutput out) throws IOException { out.writeInt(length); out.write(data); } + @Override public void readFields(DataInput in) throws IOException { length = in.readInt(); if (data == null || length > data.length) @@ -102,6 +104,7 @@ public class RandomDatum implements WritableComparable { super(RandomDatum.class); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { int n1 = readInt(b1, s1); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java index 2ca6c87f8ed..077c0b065dc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java @@ -19,17 +19,9 @@ package org.apache.hadoop.io; import java.io.IOException; -import java.io.ByteArrayOutputStream; import java.util.EnumSet; import java.lang.reflect.Type; -import org.apache.avro.Schema; -import org.apache.avro.reflect.ReflectData; -import org.apache.avro.reflect.ReflectDatumWriter; -import org.apache.avro.reflect.ReflectDatumReader; -import org.apache.avro.io.BinaryEncoder; -import org.apache.avro.io.DecoderFactory; - import junit.framework.TestCase; /** Unit test for EnumSetWritable */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java index 486d93d4385..880bba0e8b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java @@ -48,9 +48,11 @@ public class TestGenericWritable extends TestCase { /** Dummy class for testing {@link GenericWritable} */ public static class Foo implements Writable { private String foo = "foo"; + @Override public void readFields(DataInput in) throws IOException { foo = Text.readString(in); } + @Override public void write(DataOutput out) throws IOException { Text.writeString(out, foo); } @@ -65,15 +67,19 @@ public class TestGenericWritable extends TestCase { public static class Bar implements Writable, Configurable { private int bar = 42; //The Answer to The Ultimate Question Of Life, the Universe and Everything private Configuration conf = null; + @Override public void readFields(DataInput in) throws IOException { bar = in.readInt(); } + @Override public void write(DataOutput out) throws IOException { out.writeInt(bar); } + @Override public Configuration getConf() { return conf; } + @Override public void setConf(Configuration conf) { this.conf = conf; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java index 86fefcf561c..509d75e807d 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java @@ -91,6 +91,7 @@ public class TestMD5Hash extends TestCase { closeHash1.hashCode() != closeHash2.hashCode()); Thread t1 = new Thread() { + @Override public void run() { for (int i = 0; i < 100; i++) { MD5Hash hash = new MD5Hash(DFF); @@ -100,6 +101,7 @@ public class TestMD5Hash extends TestCase { }; Thread t2 = new Thread() { + @Override public void run() { for (int i = 0; i < 100; i++) { MD5Hash hash = new MD5Hash(D00); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java index bae0ccd836c..a48fb6770b2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java @@ -21,17 +21,14 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.io.nativeio.NativeIO; import org.junit.BeforeClass; -import org.junit.Before; import org.junit.Test; import static org.junit.Assume.*; import static org.junit.Assert.*; import java.io.IOException; import java.io.File; -import java.io.FileInputStream; import java.io.FileOutputStream; public class TestSecureIOUtils { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java index fe33fefd91c..1517c062b7d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java @@ -481,6 +481,7 @@ public class TestSequenceFile extends TestCase { super(in); } + @Override public void close() throws IOException { closed = true; super.close(); @@ -505,6 +506,7 @@ public class TestSequenceFile extends TestCase { try { new SequenceFile.Reader(fs, path, conf) { // this method is called by the SequenceFile.Reader constructor, overwritten, so we can access the opened file + @Override protected FSDataInputStream openFile(FileSystem fs, Path file, int bufferSize, long length) throws IOException { final InputStream in = super.openFile(fs, file, bufferSize, length); openedFile[0] = new TestFSDataInputStream(in); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java index 21da8c0dcea..df9fb540323 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java @@ -284,6 +284,7 @@ public class TestText extends TestCase { super(name); } + @Override public void run() { String name = this.getName(); DataOutputBuffer out = new DataOutputBuffer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java index df48f3caced..f7d45b9da76 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java @@ -37,16 +37,19 @@ public class TestVersionedWritable extends TestCase { private static byte VERSION = 1; + @Override public byte getVersion() { return VERSION; } + @Override public void write(DataOutput out) throws IOException { super.write(out); // version. out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); // version this.state = in.readInt(); @@ -61,6 +64,7 @@ public class TestVersionedWritable extends TestCase { /** Required by test code, below. */ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleVersionedWritable)) return false; @@ -85,6 +89,7 @@ public class TestVersionedWritable extends TestCase { SimpleVersionedWritable containedObject = new SimpleVersionedWritable(); String[] testStringArray = {"The", "Quick", "Brown", "Fox", "Jumped", "Over", "The", "Lazy", "Dog"}; + @Override public void write(DataOutput out) throws IOException { super.write(out); out.writeUTF(shortTestString); @@ -97,6 +102,7 @@ public class TestVersionedWritable extends TestCase { } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); shortTestString = in.readUTF(); @@ -108,6 +114,7 @@ public class TestVersionedWritable extends TestCase { + @Override public boolean equals(Object o) { super.equals(o); @@ -134,6 +141,7 @@ public class TestVersionedWritable extends TestCase { /* This one checks that version mismatch is thrown... */ public static class SimpleVersionedWritableV2 extends SimpleVersionedWritable { static byte VERSION = 2; + @Override public byte getVersion() { return VERSION; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java index 31c237f8728..971e237d50b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java @@ -38,10 +38,12 @@ public class TestWritable extends TestCase { int state = RANDOM.nextInt(); + @Override public void write(DataOutput out) throws IOException { out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { this.state = in.readInt(); } @@ -53,6 +55,7 @@ public class TestWritable extends TestCase { } /** Required by test code, below. */ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleWritable)) return false; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java index 7cb069ab006..396079c3948 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java @@ -39,10 +39,12 @@ public class TestWritableName extends TestCase { int state = RANDOM.nextInt(); + @Override public void write(DataOutput out) throws IOException { out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { this.state = in.readInt(); } @@ -54,6 +56,7 @@ public class TestWritableName extends TestCase { } /** Required by test code, below. 
*/ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleWritable)) return false; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java index 2caef859e64..280f1a8785c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java @@ -40,68 +40,81 @@ public class TestCodecFactory extends TestCase { return conf; } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return null; } + @Override public Class getCompressorType() { return null; } + @Override public Compressor createCompressor() { return null; } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { return null; } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return null; } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { return null; } + @Override public Class getDecompressorType() { return null; } + @Override public Decompressor createDecompressor() { return null; } + @Override public String getDefaultExtension() { return ".base"; } } private static class BarCodec extends BaseCodec { + @Override public String getDefaultExtension() { return "bar"; } } private static class FooBarCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".foo.bar"; } } private static class FooCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".foo"; } } private static class NewGzipCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".gz"; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java index 1584895407e..c25c4dc427b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java @@ -93,6 +93,7 @@ public class NanoTimer { * * Note: If timer is never started, "ERR" will be returned. 
*/ + @Override public String toString() { if (!readable()) { return "ERR"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java index 6242ea6b370..2682634516f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java @@ -35,7 +35,6 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader; import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.io.file.tfile.TFile.Reader.Location; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; -import org.apache.hadoop.util.NativeCodeLoader; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java index bd56d449653..7a2c2fc9c47 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java @@ -19,9 +19,6 @@ package org.apache.hadoop.io.file.tfile; import java.io.IOException; -import org.apache.hadoop.io.RawComparator; -import org.apache.hadoop.io.WritableComparator; - /** * * Byte arrays test case class using GZ compression codec, base class of none diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java index 070177310e7..05b89fb0b33 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java @@ -147,12 +147,14 @@ public class TestTFileSeqFileComparison extends TestCase { this.writer = new TFile.Writer(fsdos, minBlkSize, compress, null, conf); } + @Override public void append(BytesWritable key, BytesWritable value) throws IOException { writer.append(key.get(), 0, key.getSize(), value.get(), 0, value .getSize()); } + @Override public void close() throws IOException { writer.close(); fsdos.close(); @@ -195,22 +197,27 @@ public class TestTFileSeqFileComparison extends TestCase { - valueBuffer.length)]; } + @Override public byte[] getKey() { return keyBuffer; } + @Override public int getKeyLength() { return keyLength; } + @Override public byte[] getValue() { return valueBuffer; } + @Override public int getValueLength() { return valueLength; } + @Override public boolean next() throws IOException { if (scanner.atEnd()) return false; Entry entry = scanner.entry(); @@ -224,6 +231,7 @@ public class TestTFileSeqFileComparison extends TestCase { return true; } + @Override public void close() throws IOException { scanner.close(); reader.close(); @@ -264,11 +272,13 @@ public class TestTFileSeqFileComparison extends TestCase { } } + @Override public void append(BytesWritable key, BytesWritable value) throws 
IOException { writer.append(key, value); } + @Override public void close() throws IOException { writer.close(); fsdos.close(); @@ -289,26 +299,32 @@ public class TestTFileSeqFileComparison extends TestCase { value = new BytesWritable(); } + @Override public byte[] getKey() { return key.get(); } + @Override public int getKeyLength() { return key.getSize(); } + @Override public byte[] getValue() { return value.get(); } + @Override public int getValueLength() { return value.getSize(); } + @Override public boolean next() throws IOException { return reader.next(key, value); } + @Override public void close() throws IOException { reader.close(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index b9d4ec5690a..acd728b0ecb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -88,6 +88,7 @@ public class TestNativeIO { List statters = new ArrayList(); for (int i = 0; i < 10; i++) { Thread statter = new Thread() { + @Override public void run() { long et = Time.now() + 5000; while (Time.now() < et) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java index 4949ef31406..77c9e30eed4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java @@ -252,6 +252,7 @@ public class TestFailoverProxy { this.unreliable = unreliable; } + @Override public void run() { try { result = unreliable.failsIfIdentifierDoesntMatch("impl2"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java index 54fe6778440..5b77698b100 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java @@ -19,7 +19,6 @@ package org.apache.hadoop.io.retry; import java.io.IOException; -import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.StandbyException; @@ -60,24 +59,29 @@ public class UnreliableImplementation implements UnreliableInterface { this.exceptionToFailWith = exceptionToFailWith; } + @Override public void alwaysSucceeds() { // do nothing } + @Override public void alwaysFailsWithFatalException() throws FatalException { throw new FatalException(); } + @Override public void alwaysFailsWithRemoteFatalException() throws RemoteException { throw new RemoteException(FatalException.class.getName(), "Oops"); } + @Override public void failsOnceThenSucceeds() throws UnreliableException { if (failsOnceInvocationCount++ == 0) { throw new UnreliableException(); } } + @Override public boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException { if (failsOnceWithValueInvocationCount++ == 0) { throw new UnreliableException(); @@ -85,6 +89,7 @@ public class 
UnreliableImplementation implements UnreliableInterface { return true; } + @Override public void failsTenTimesThenSucceeds() throws UnreliableException { if (failsTenTimesInvocationCount++ < 10) { throw new UnreliableException(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java index 275a0dc1e28..4548c869f91 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java @@ -21,10 +21,12 @@ package org.apache.hadoop.io.serializer.avro; public class Record { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java index 181419c137e..1926ec55e53 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java @@ -70,10 +70,12 @@ public class TestAvroSerialization extends TestCase { public static class InnerRecord { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; @@ -91,10 +93,12 @@ public class TestAvroSerialization extends TestCase { public static class RefSerializable implements AvroReflectSerializable { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java index ace6173faa1..a82419d5dd3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java @@ -208,6 +208,7 @@ public class MiniRPCBenchmark { try { client = proxyUserUgi.doAs(new PrivilegedExceptionAction() { + @Override public MiniProtocol run() throws IOException { MiniProtocol p = (MiniProtocol) RPC.getProxy(MiniProtocol.class, MiniProtocol.versionID, addr, conf); @@ -235,6 +236,7 @@ public class MiniRPCBenchmark { long start = Time.now(); try { client = currentUgi.doAs(new PrivilegedExceptionAction() { + @Override public MiniProtocol run() throws IOException { return (MiniProtocol) RPC.getProxy(MiniProtocol.class, MiniProtocol.versionID, addr, conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index c7bc6411de2..a0d6de0e9a8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -130,6 +130,7 @@ public class TestIPC { this.count = count; } + @Override public void run() { for (int i = 0; i < count; i++) { try { @@ -219,6 
+220,7 @@ public class TestIPC { private static class IOEOnReadWritable extends LongWritable { public IOEOnReadWritable() {} + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); maybeThrowIOE(); @@ -229,6 +231,7 @@ public class TestIPC { private static class RTEOnReadWritable extends LongWritable { public RTEOnReadWritable() {} + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); maybeThrowRTE(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java index e2e32c75ba1..bf9fbc26d85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java @@ -106,17 +106,21 @@ public class TestRPC { public static class TestImpl implements TestProtocol { int fastPingCounter = 0; + @Override public long getProtocolVersion(String protocol, long clientVersion) { return TestProtocol.versionID; } + @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int hashcode) { return new ProtocolSignature(TestProtocol.versionID, null); } + @Override public void ping() {} + @Override public synchronized void slowPing(boolean shouldSlow) { if (shouldSlow) { while (fastPingCounter < 2) { @@ -131,17 +135,22 @@ public class TestRPC { } } + @Override public String echo(String value) throws IOException { return value; } + @Override public String[] echo(String[] values) throws IOException { return values; } + @Override public Writable echo(Writable writable) { return writable; } + @Override public int add(int v1, int v2) { return v1 + v2; } + @Override public int add(int[] values) { int sum = 0; for (int i = 0; i < values.length; i++) { @@ -150,16 +159,19 @@ public class TestRPC { return sum; } + @Override public int error() throws IOException { throw new IOException("bobo"); } + @Override public void testServerGet() throws IOException { if (!(Server.get() instanceof RPC.Server)) { throw new IOException("Server.get() failed"); } } + @Override public int[] exchange(int[] values) { for (int i = 0; i < values.length; i++) { values[i] = i; @@ -186,6 +198,7 @@ public class TestRPC { } // do two RPC that transfers data. 
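Aside on the pattern above: a large share of the hunks in this patch add @Override to run() methods of anonymous Thread and Runnable subclasses (the TestNativeIO statter and TestFailoverProxy earlier, the TestIPC caller here, and the TestRPC workers just below). Since Java 6 the annotation is also legal on methods that implement interface methods, which is what makes this sweep possible. A minimal sketch, with hypothetical names, of what the annotation buys:

    public class OverrideRunSketch {
        public static void main(String[] args) throws InterruptedException {
            Thread statter = new Thread() {
                @Override
                public void run() {          // matches Thread.run(), so it compiles
                    System.out.println("worker running");
                }
            };
            statter.start();
            statter.join();
            // A typo such as "public void Run()" would still compile without
            // @Override and never be invoked by start(); with @Override the
            // compiler rejects the mismatch immediately.
        }
    }
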
+ @Override public void run() { int[] indata = new int[datasize]; int[] outdata = null; @@ -220,6 +233,7 @@ public class TestRPC { return done; } + @Override public void run() { try { proxy.slowPing(true); // this would hang until two fast pings happened diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index 50ae210ea9e..e2b7707cd99 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -284,6 +284,7 @@ System.out.println("echo int is NOT supported"); "org.apache.hadoop.ipc.TestRPCCompatibility$TestProtocol1") public interface TestProtocol4 extends TestProtocol2 { public static final long versionID = 4L; + @Override int echo(int value) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 9246fd5d721..014875440e2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -136,15 +136,18 @@ public class TestSaslRPC { public static class TestTokenSecretManager extends SecretManager { + @Override public byte[] createPassword(TestTokenIdentifier id) { return id.getBytes(); } + @Override public byte[] retrievePassword(TestTokenIdentifier id) throws InvalidToken { return id.getBytes(); } + @Override public TestTokenIdentifier createIdentifier() { return new TestTokenIdentifier(); } @@ -152,6 +155,7 @@ public class TestSaslRPC { public static class BadTokenSecretManager extends TestTokenSecretManager { + @Override public byte[] retrievePassword(TestTokenIdentifier id) throws InvalidToken { throw new InvalidToken(ERROR_MESSAGE); @@ -186,6 +190,7 @@ public class TestSaslRPC { public static class TestSaslImpl extends TestRPC.TestImpl implements TestSaslProtocol { + @Override public AuthenticationMethod getAuthMethod() throws IOException { return UserGroupInformation.getCurrentUser().getAuthenticationMethod(); } @@ -450,6 +455,7 @@ public class TestSaslRPC { current.addToken(token); current.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { TestSaslProtocol proxy = null; try { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java index 8d5cfc9a553..ec54f596869 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java @@ -43,6 +43,7 @@ public class TestMetricsServlet extends TestCase { * Initializes, for testing, two NoEmitMetricsContext's, and adds one value * to the first of them. 
*/ + @Override public void setUp() throws IOException { nc1 = new NoEmitMetricsContext(); nc1.init("test1", ContextFactory.getFactory()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java index 685fedc22ca..5b75e33e318 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java @@ -21,8 +21,6 @@ package org.apache.hadoop.metrics2.lib; import org.junit.Test; import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.apache.hadoop.test.MockitoMaker.*; - import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; @@ -145,6 +143,7 @@ public class TestMetricsAnnotations { @Metric int getG0() { return 0; } + @Override public void getMetrics(MetricsCollector collector, boolean all) { collector.addRecord("foo") .setContext("foocontext") @@ -183,6 +182,7 @@ public class TestMetricsAnnotations { @Metric MutableCounterInt c1; + @Override public void getMetrics(MetricsCollector collector, boolean all) { collector.addRecord("foo"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java index 1969ccee54e..47b496fa57d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java @@ -51,6 +51,7 @@ public class TestMetricsRegistry { assertTrue("s1 found", r.get("s1") instanceof MutableStat); expectMetricsException("Metric name c1 already exists", new Runnable() { + @Override public void run() { r.newCounter("c1", "test dup", 0); } }); } @@ -70,10 +71,12 @@ public class TestMetricsRegistry { r.newGauge("g1", "test add", 1); expectMetricsException("Unsupported add", new Runnable() { + @Override public void run() { r.add("c1", 42); } }); expectMetricsException("Unsupported add", new Runnable() { + @Override public void run() { r.add("g1", 42); } }); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java index 379e9401d48..4204e2b6245 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java @@ -21,10 +21,8 @@ import org.apache.hadoop.conf.Configuration; import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; /** * Implements the {@link DNSToSwitchMapping} via static mappings. 
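Aside: the setUp() override just above, and the FromCpp/TestRecordIO/TestRecordVersioning/ToCpp hunks further down, mark JUnit 3 fixture methods inherited from junit.framework.TestCase. A compact sketch of that pattern (a hypothetical test class; assumes JUnit 3.x on the classpath):

    import junit.framework.TestCase;

    public class FixtureSketchTest extends TestCase {
        private StringBuilder fixture;

        @Override
        protected void setUp() throws Exception {
            fixture = new StringBuilder("ready");   // runs before each test method
        }

        @Override
        protected void tearDown() throws Exception {
            fixture = null;                         // runs after each test method
        }

        public void testFixture() {
            assertEquals("ready", fixture.toString());
        }
    }

Without @Override, a misspelling such as setup() would be a new method that JUnit never calls, so the test would silently run against an uninitialized fixture.
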
Used diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java index 2cd2271f43b..aeb68ea1de8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java @@ -33,9 +33,11 @@ public class FromCpp extends TestCase { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java index 1cba75ed804..816d69ee26e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java @@ -23,8 +23,6 @@ import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.lang.reflect.Array; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Random; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java index 163ec1b00b2..38eb9a07614 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java @@ -34,9 +34,11 @@ public class TestRecordIO extends TestCase { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java index 129ba2ced86..5977f03f853 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java @@ -35,9 +35,11 @@ public class TestRecordVersioning extends TestCase { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java index d3c6385d745..7a3411e1efa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java @@ -33,9 +33,11 @@ public class ToCpp extends TestCase { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java index 3c12047be21..d8138817e1f 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java @@ -22,7 +22,6 @@ import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; -import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -55,6 +54,7 @@ public class TestAuthenticationFilter extends TestCase { FilterContainer container = Mockito.mock(FilterContainer.class); Mockito.doAnswer( new Answer() { + @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { Object[] args = invocationOnMock.getArguments(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java index d432623be02..72d02dbc6e3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java @@ -29,13 +29,10 @@ import java.io.IOException; import java.security.Key; import java.security.NoSuchAlgorithmException; import java.util.HashMap; -import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.Collection; -import static org.mockito.Mockito.mock; - import javax.crypto.KeyGenerator; import org.apache.hadoop.io.Text; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index ee7bc29d1e8..de35cd24607 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -39,7 +39,6 @@ import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; import org.junit.Test; -import org.apache.hadoop.ipc.TestSaslRPC; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSelector; @@ -113,6 +112,7 @@ public class TestDoAsEffectiveUser { PROXY_USER_NAME, realUserUgi); UserGroupInformation curUGI = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public UserGroupInformation run() throws IOException { return UserGroupInformation.getCurrentUser(); } @@ -131,10 +131,12 @@ public class TestDoAsEffectiveUser { public class TestImpl implements TestProtocol { + @Override public String aMethod() throws IOException { return UserGroupInformation.getCurrentUser().toString(); } + @Override public long getProtocolVersion(String protocol, long clientVersion) throws IOException { return TestProtocol.versionID; @@ -168,6 +170,7 @@ public class TestDoAsEffectiveUser { PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy 
= RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -210,6 +213,7 @@ public class TestDoAsEffectiveUser { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -257,6 +261,7 @@ public class TestDoAsEffectiveUser { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -296,6 +301,7 @@ public class TestDoAsEffectiveUser { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -334,6 +340,7 @@ public class TestDoAsEffectiveUser { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = (TestProtocol) RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -375,6 +382,7 @@ public class TestDoAsEffectiveUser { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java index b284fe0c6a5..48627276f85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java @@ -54,6 +54,7 @@ public class TestGroupsCaching { private static Set allGroups = new HashSet(); private static Set blackList = new HashSet(); + @Override public List getGroups(String user) throws IOException { LOG.info("Getting groups for " + user); if (blackList.contains(user)) { @@ -62,6 +63,7 @@ public class TestGroupsCaching { return new LinkedList(allGroups); } + @Override public void cacheGroupsRefresh() throws IOException { LOG.info("Cache is being refreshed."); clearBlackList(); @@ -73,6 +75,7 @@ public class TestGroupsCaching { blackList.clear(); } + @Override public void cacheGroupsAdd(List groups) throws IOException { LOG.info("Adding " + groups + " to groups."); allGroups.addAll(groups); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java index e8b3a1c9188..99c5c2a83f2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java @@ -21,16 +21,11 @@ import static org.junit.Assert.*; import java.util.Arrays; import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -import 
org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.security.GroupMappingServiceProvider; import org.apache.hadoop.security.JniBasedUnixGroupsMapping; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; -import org.apache.hadoop.util.ReflectionUtils; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java index 4d8224b7cca..ce8ee28207c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java @@ -92,6 +92,7 @@ public class TestUserGroupInformation { UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); UserGroupInformation curUGI = userGroupInfo.doAs(new PrivilegedExceptionAction(){ + @Override public UserGroupInformation run() throws IOException { return UserGroupInformation.getCurrentUser(); }}); @@ -316,6 +317,7 @@ public class TestUserGroupInformation { // ensure that the tokens are passed through doAs Collection> otherSet = ugi.doAs(new PrivilegedExceptionAction>>(){ + @Override public Collection> run() throws IOException { return UserGroupInformation.getCurrentUser().getTokens(); } @@ -342,6 +344,7 @@ public class TestUserGroupInformation { // ensure that the token identifiers are passed through doAs Collection otherSet = ugi .doAs(new PrivilegedExceptionAction>() { + @Override public Collection run() throws IOException { return UserGroupInformation.getCurrentUser().getTokenIdentifiers(); } @@ -358,6 +361,7 @@ public class TestUserGroupInformation { ugi.setAuthenticationMethod(am); Assert.assertEquals(am, ugi.getAuthenticationMethod()); ugi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { Assert.assertEquals(am, UserGroupInformation.getCurrentUser() .getAuthenticationMethod()); @@ -379,6 +383,7 @@ public class TestUserGroupInformation { Assert.assertEquals(am, UserGroupInformation .getRealAuthenticationMethod(proxyUgi)); proxyUgi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { Assert.assertEquals(AuthenticationMethod.PROXY, UserGroupInformation .getCurrentUser().getAuthenticationMethod()); @@ -451,6 +456,7 @@ public class TestUserGroupInformation { public void testUGIUnderNonHadoopContext() throws Exception { Subject nonHadoopSubject = new Subject(); Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction() { + @Override public Void run() throws IOException { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); assertNotNull(ugi); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java index 036395ea7f5..eebe27ae179 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java @@ -22,13 +22,10 @@ import java.util.Set; import java.util.List; import org.junit.Test; -import 
org.junit.Before; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java index 6d7d695663b..1741eb7477f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java @@ -18,8 +18,6 @@ package org.apache.hadoop.security.token; -import static junit.framework.Assert.assertEquals; - import java.io.*; import java.util.Arrays; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java index c1dd00a4d7d..85e227921f0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java @@ -19,7 +19,6 @@ package org.apache.hadoop.security.token.delegation; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; @@ -47,7 +46,6 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation; import org.apache.hadoop.util.Daemon; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.junit.Test; @@ -73,9 +71,11 @@ public class TestDelegationToken { return KIND; } + @Override public void write(DataOutput out) throws IOException { super.write(out); } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); } @@ -231,6 +231,7 @@ public class TestDelegationToken { dtSecretManager, "SomeUser", "JobTracker"); // Fake renewer should not be able to renew shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "FakeRenewer"); return null; @@ -259,6 +260,7 @@ public class TestDelegationToken { Thread.sleep(2000); shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "JobTracker"); return null; @@ -280,6 +282,7 @@ public class TestDelegationToken { generateDelegationToken(dtSecretManager, "SomeUser", "JobTracker"); //Fake renewer should not be able to renew shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "FakeCanceller"); return null; @@ -287,6 +290,7 @@ public class TestDelegationToken { }, AccessControlException.class); dtSecretManager.cancelToken(token, "JobTracker"); shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "JobTracker"); return null; @@ 
-379,6 +383,7 @@ public class TestDelegationToken { final int numTokensPerThread = 100; class tokenIssuerThread implements Runnable { + @Override public void run() { for(int i =0;i , Runnable { * * @return the class name of the wrapper callable/runnable. */ + @Override public String toString() { return (runnable != null) ? runnable.getClass().getSimpleName() : callable.getClass().getSimpleName(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java index a2e543aefe8..cfb567e6312 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java @@ -328,6 +328,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc getAuthority()); UserGroupInformation ugi = getUGI(user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public T run() throws Exception { FileSystem fs = createFileSystem(conf); Instrumentation instrumentation = getServer().get(Instrumentation.class); @@ -362,6 +363,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).getAuthority()); UserGroupInformation ugi = getUGI(user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return createFileSystem(conf); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java index 9a36955d6a2..ee4455c9998 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java @@ -85,16 +85,19 @@ public class InstrumentationService extends BaseService implements Instrumentati all.put("samplers", (Map) samplers); jvmVariables.put("free.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().freeMemory(); } })); jvmVariables.put("max.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().maxMemory(); } })); jvmVariables.put("total.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().totalMemory(); } @@ -162,6 +165,7 @@ public class InstrumentationService extends BaseService implements Instrumentati long own; long total; + @Override public Cron start() { if (total != 0) { throw new IllegalStateException("Cron already used"); @@ -175,6 +179,7 @@ public class InstrumentationService extends BaseService implements Instrumentati return this; } + @Override public Cron stop() { if (total != 0) { throw new IllegalStateException("Cron already used"); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java index f4e5bafece5..2da7f24ec31 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java @@ -96,6 +96,7 @@ public class SchedulerService extends BaseService implements Scheduler { LOG.debug("Scheduling callable [{}], interval [{}] seconds, delay [{}] in [{}]", new Object[]{callable, delay, interval, unit}); Runnable r = new Runnable() { + @Override public void run() { String instrName = callable.getClass().getSimpleName(); Instrumentation instr = getServer().get(Instrumentation.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java index c56f6e49688..b040054267f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java @@ -147,6 +147,7 @@ public abstract class ServerWebApp extends Server implements ServletContextListe * * @param event servelt context event. */ + @Override public void contextInitialized(ServletContextEvent event) { try { init(); @@ -194,6 +195,7 @@ public abstract class ServerWebApp extends Server implements ServletContextListe * * @param event servelt context event. */ + @Override public void contextDestroyed(ServletContextEvent event) { destroy(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java index bce8c3b0d9b..f1260329763 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java @@ -29,6 +29,7 @@ public abstract class BooleanParam extends Param { super(name, defaultValue); } + @Override protected Boolean parse(String str) throws Exception { if (str.equalsIgnoreCase("true")) { return true; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java index b0e11735720..bc2c4a54c0b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java @@ -27,6 +27,7 @@ public abstract class ByteParam extends Param { super(name, defaultValue); } + @Override protected Byte parse(String str) throws Exception { return Byte.parseByte(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java index d76db629b2c..8baef67e8ca 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java @@ -32,6 +32,7 @@ public abstract class EnumParam> extends Param { klass = e; } 
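Aside: the wsrs parameter classes above and in the hunks that follow (BooleanParam, ByteParam, EnumParam, IntegerParam, LongParam, ShortParam, StringParam) all share one template-method shape, which is why each gains exactly one @Override on parse. A condensed sketch with the generic type arguments written out; the real classes also carry defaults, radix handling, and pattern validation:

    abstract class Param<T> {
        private final String name;
        protected T value;

        Param(String name, T defaultValue) {
            this.name = name;
            this.value = defaultValue;
        }

        public String getName() {
            return name;
        }

        // The hook each typed subclass overrides.
        protected abstract T parse(String str) throws Exception;

        @Override
        public String toString() {                  // as in the Param.java hunk below
            return (value != null) ? value.toString() : "NULL";
        }
    }

    class IntegerParam extends Param<Integer> {
        IntegerParam(String name, Integer defaultValue) {
            super(name, defaultValue);
        }

        @Override
        protected Integer parse(String str) throws Exception {
            return Integer.parseInt(str);           // typed parsing lives here
        }
    }
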
+ @Override protected E parse(String str) throws Exception { return Enum.valueOf(klass, str.toUpperCase()); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java index faa99a440e7..b7b08f6a9b6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java @@ -27,6 +27,7 @@ public abstract class IntegerParam extends Param { super(name, defaultValue); } + @Override protected Integer parse(String str) throws Exception { return Integer.parseInt(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java index c2399bf76b0..11bf0820604 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java @@ -27,6 +27,7 @@ public abstract class LongParam extends Param { super(name, defaultValue); } + @Override protected Long parse(String str) throws Exception { return Long.parseLong(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java index f73c52fd5a2..8af5373a3ec 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java @@ -55,6 +55,7 @@ public abstract class Param { protected abstract T parse(String str) throws Exception; + @Override public String toString() { return (value != null) ? 
value.toString() : "NULL"; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java index 7986e72bdb8..7d700c1744b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java @@ -34,6 +34,7 @@ public abstract class ShortParam extends Param { this(name, defaultValue, 10); } + @Override protected Short parse(String str) throws Exception { return Short.parseShort(str, radix); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java index 85bee1c9013..1695eb3aa2f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java @@ -36,6 +36,7 @@ public abstract class StringParam extends Param { parseParam(defaultValue); } + @Override public String parseParam(String str) { try { if (str != null) { @@ -52,6 +53,7 @@ public abstract class StringParam extends Param { return value; } + @Override protected String parse(String str) throws Exception { if (pattern != null) { if (!pattern.matcher(str).matches()) { diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java index 87b1420996e..0cb0cc64b34 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java @@ -47,14 +47,17 @@ public class TestHttpFSFileSystemLocalFileSystem extends BaseTestHttpFSWith { super(operation); } + @Override protected Path getProxiedFSTestDir() { return addPrefix(new Path(TestDirHelper.getTestDir().getAbsolutePath())); } + @Override protected String getProxiedFSURI() { return "file:///"; } + @Override protected Configuration getProxiedFSConf() { Configuration conf = new Configuration(false); conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, getProxiedFSURI()); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java index fa0a7555a75..b211e9a4661 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java @@ -32,19 +32,23 @@ public class TestHttpFSWithHttpFSFileSystem extends BaseTestHttpFSWith { super(operation); } + @Override protected Class getFileSystemClass() { return HttpFSFileSystem.class; } + @Override protected Path getProxiedFSTestDir() { return TestHdfsHelper.getHdfsTestDir(); } + @Override protected String getProxiedFSURI() { return TestHdfsHelper.getHdfsConf().get( 
CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY); } + @Override protected Configuration getProxiedFSConf() { return TestHdfsHelper.getHdfsConf(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java index a32671854c1..db4cdeeadb8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java @@ -34,7 +34,6 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.HFSTestCase; import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDirHelper; -import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.junit.Test; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java index f4996de542c..eb2cdc61427 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java @@ -79,6 +79,7 @@ public class TestHFSTestCase extends HFSTestCase { public void waitFor() { long start = Time.now(); long waited = waitFor(1000, new Predicate() { + @Override public boolean evaluate() throws Exception { return true; } @@ -93,6 +94,7 @@ public class TestHFSTestCase extends HFSTestCase { setWaitForRatio(1); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } @@ -107,6 +109,7 @@ public class TestHFSTestCase extends HFSTestCase { setWaitForRatio(2); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java index 10c798f3faa..74d34ec80ec 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java @@ -64,6 +64,7 @@ public class TestHTestCase extends HTestCase { public void waitFor() { long start = Time.now(); long waited = waitFor(1000, new Predicate() { + @Override public boolean evaluate() throws Exception { return true; } @@ -78,6 +79,7 @@ public class TestHTestCase extends HTestCase { setWaitForRatio(1); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } @@ -92,6 +94,7 @@ public class TestHTestCase extends HTestCase { setWaitForRatio(2); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java index 2afd7d35a41..26d253fecb3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java @@ -31,6 +31,7 @@ import org.junit.runners.model.Statement; public class TestHdfsHelper extends TestDirHelper { + @Override @Test public void dummy() { } diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 7d7c1a49617..674247f7252 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -230,6 +230,9 @@ Release 2.0.1-alpha - UNRELEASED HDFS-3177. Update DFSClient and DataXceiver to handle different checkum types in file checksum computation. (Kihwal Lee via szetszwo) + HDFS-3844. Add @Override and remove {@inheritdoc} and unnecessary + imports. (Jing Zhao via suresh) + OPTIMIZATIONS HDFS-2982. Startup performance suffers when there are many edit log diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java index 2386c841304..222d454a701 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java @@ -312,9 +312,6 @@ public class Hdfs extends AbstractFileSystem { return listing.toArray(new FileStatus[listing.size()]); } - /** - * {@inheritDoc} - */ @Override public RemoteIterator listCorruptFileBlocks(Path path) throws IOException { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java index 4150c5c6fe1..438d56e52f0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.security.KeyStore; import java.security.cert.X509Certificate; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java index 39a9b3086a2..c24a59b87dd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java @@ -23,7 +23,6 @@ import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; @@ -35,7 +34,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil; -import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferEncryptor; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import 
org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader; import org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver; @@ -47,8 +45,6 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException; -import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; -import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.SocketInputWrapper; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java index f3575c4caa0..ac6adfefb63 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java @@ -118,7 +118,6 @@ public class BlockTokenIdentifier extends TokenIdentifier { return a == null ? b == null : a.equals(b); } - /** {@inheritDoc} */ @Override public boolean equals(Object obj) { if (obj == this) { @@ -135,7 +134,6 @@ public class BlockTokenIdentifier extends TokenIdentifier { return false; } - /** {@inheritDoc} */ @Override public int hashCode() { return (int) expiryDate ^ keyId ^ (int) blockId ^ modes.hashCode() diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 1e90c5892fe..3a6153e372e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -60,7 +60,6 @@ import static org.apache.hadoop.util.ExitUtil.terminate; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.FSClusterStats; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.Namesystem; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java index e33853f0de6..3ccbc8462a3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java @@ -49,7 +49,6 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.BlockTargetPair; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.NameNode; import 
org.apache.hadoop.hdfs.server.namenode.Namesystem; import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java index 8f921bde1fa..73926010a42 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java @@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DatanodeID; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.Namesystem; import org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.Time; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java index b7da1160484..860d1d261f8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java @@ -22,11 +22,7 @@ import java.util.Map; import java.util.Queue; import org.apache.hadoop.hdfs.protocol.Block; -import org.apache.hadoop.hdfs.server.blockmanagement.PendingDataNodeMessages.ReportedBlockInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; -import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; - -import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java index 60a1216d120..831f3430bee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java @@ -44,7 +44,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.BlockReader; import org.apache.hadoop.hdfs.BlockReaderFactory; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; @@ -60,14 +59,12 @@ import org.apache.hadoop.hdfs.web.resources.DelegationParam; import org.apache.hadoop.hdfs.web.resources.DoAsParam; import org.apache.hadoop.hdfs.web.resources.UserParam; import org.apache.hadoop.http.HtmlQuoting; -import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.authentication.util.KerberosName; -import 
org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.VersionInfo; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java index 2d1ff6437b9..4393ec7bcad 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java @@ -16,8 +16,6 @@ */ package org.apache.hadoop.hdfs.server.datanode; -import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; - import java.net.InetSocketAddress; import java.net.ServerSocket; import java.nio.channels.ServerSocketChannel; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java index de80f80cf2b..3816dc10d44 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java @@ -22,9 +22,6 @@ import org.apache.hadoop.classification.InterfaceStability; import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - /** * A generic abstract class to support reading edits log data from * persistent storage. 
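Aside: the HDFS half of this patch carries out the other two items in the CHANGES.txt entry above. Bare /** {@inheritDoc} */ comments are dropped (BlockTokenIdentifier above; SerialNumberManager, CyclicIteration, and the test utilities below), since javadoc inherits documentation for overriding methods automatically and @Override already records the relationship for the compiler. A sketch of the resulting style, modeled loosely on the BlockTokenIdentifier hunk; the class and fields here are hypothetical:

    class TokenIdSketch {
        private final long expiryDate;
        private final int keyId;

        TokenIdSketch(long expiryDate, int keyId) {
            this.expiryDate = expiryDate;
            this.keyId = keyId;
        }

        @Override                        // replaces the removed {@inheritDoc} stub
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            if (!(obj instanceof TokenIdSketch)) {
                return false;
            }
            TokenIdSketch that = (TokenIdSketch) obj;
            return expiryDate == that.expiryDate && keyId == that.keyId;
        }

        @Override
        public int hashCode() {
            return (int) expiryDate ^ keyId;   // same XOR style as the hunk above
        }
    }
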
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java index eb99077c179..7612814b85d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java @@ -30,12 +30,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LayoutVersion; -import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction; import org.apache.hadoop.hdfs.server.common.Storage; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java index da8c2a1616a..42a90fbd2b8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.Options.Rename; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.protocol.Block; -import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.LayoutVersion; import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java index 66679b05fb1..e1882d94814 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java @@ -43,7 +43,6 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.LayoutVersion; import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; -import org.apache.hadoop.hdfs.server.common.GenerationStamp; import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java index d6453fa8b54..a8df0f706c8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java +++ 
@@ -31,7 +31,6 @@ import org.apache.hadoop.hdfs.DeprecatedUTF8;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
-import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.ShortWritable;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 44b0437d131..7090f455d8b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -18,13 +18,8 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT;
-
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -34,7 +29,6 @@ import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
 import org.apache.hadoop.hdfs.web.AuthFilter;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
index e12ce698f36..6897e353fff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
@@ -75,7 +75,7 @@ class SerialNumberManager {
       return t;
     }
 
-    /** {@inheritDoc} */
+    @Override
     public String toString() {
       return "max=" + max + ",\n t2i=" + t2i + ",\n i2t=" + i2t;
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
index 8030f2817e6..3fd1dc26a0c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
 
-import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
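A note on the recurring substitution above: the patch replaces "/** {@inheritDoc} */" Javadoc stubs with the @Override annotation. Javadoc inherits documentation from the overridden method automatically, so the stub adds nothing, while @Override adds a compile-time check that the method really overrides something. A minimal sketch, using a hypothetical Example class that is not part of this patch:

    // Hypothetical illustration only; not part of the patch being merged.
    class Example {
      // No "/** {@inheritDoc} */" stub is needed: Javadoc picks up
      // Object.toString()'s documentation on its own.
      @Override
      public String toString() {
        return "Example";
      }

      // With @Override, a mismatched signature such as
      //   public boolean equals(Example other)
      // fails to compile instead of silently becoming an overload,
      // which is the safety the deleted Javadoc stubs never provided.
    }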
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
index 2aade9eb14b..0c8ac6353cd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
@@ -31,13 +31,11 @@ import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
 import org.apache.hadoop.hdfs.tools.offlineImageViewer.ImageVisitor.ImageElement;
-import org.apache.hadoop.hdfs.util.XMLUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
-import org.xml.sax.helpers.AttributesImpl;
 
 /**
  * ImageLoaderCurrent processes Hadoop FSImage files and walks over
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java
index 4685a2e6aed..6045615edc1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java
@@ -53,7 +53,6 @@ public class CyclicIteration<K, V> implements Iterable<Map.Entry<K, V>> {
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public Iterator<Map.Entry<K, V>> iterator() {
     return new CyclicIterator();
@@ -89,13 +88,11 @@ public class CyclicIteration<K, V> implements Iterable<Map.Entry<K, V>> {
       return i.next();
     }
 
-    /** {@inheritDoc} */
     @Override
     public boolean hasNext() {
       return hasnext;
     }
 
-    /** {@inheritDoc} */
     @Override
     public Map.Entry<K, V> next() {
       if (!hasnext) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
index 4724595d4af..893e0b7cb66 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
@@ -271,7 +271,6 @@ public class DataTransferTestUtil {
       }
     }
 
-    /** {@inheritDoc} */
     @Override
     public String toString() {
       return error + " " + super.toString();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
index 8f4f9c2be6d..888fadf5dd3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
@@ -51,9 +51,6 @@ public class PipelinesTestUtil extends DataTransferTestUtil {
       this.name = name;
     }
 
-    /**
-     * {@inheritDoc}
-     */
     @Override
     public void run(NodeBytes nb) throws IOException {
       synchronized (rcv) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java
index bea29f9c67d..cec0c594a71 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs;
 
 import static org.junit.Assert.*;
 
-import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 
@@ -29,9 +28,7 @@ import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
-import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 
 import org.junit.Test;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java
index 6119584c0d8..f3925c963cd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java
@@ -23,10 +23,7 @@ import java.util.List;
 
 import org.junit.Test;
 import org.junit.Before;
-import org.junit.After;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
@@ -35,9 +32,6 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 
-import java.util.Properties;
-import java.io.FileReader;
-import java.io.FileWriter;
 import org.junit.Assert;
 import org.apache.hadoop.test.GenericTestUtils;