HADOOP-11074. Move s3-related FS connector code to hadoop-aws. (David S. Wang via Colin Patrick McCabe)

Colin Patrick McCabe 2014-09-10 16:14:08 -07:00
parent 7f80e14209
commit 5ec7fcd9dd
51 changed files with 392 additions and 12 deletions

View File

@@ -15,7 +15,5 @@
org.apache.hadoop.fs.LocalFileSystem
org.apache.hadoop.fs.viewfs.ViewFileSystem
org.apache.hadoop.fs.s3.S3FileSystem
org.apache.hadoop.fs.s3native.NativeS3FileSystem
org.apache.hadoop.fs.ftp.FTPFileSystem
org.apache.hadoop.fs.HarFileSystem
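
For context (not part of the diff): Hadoop discovers FileSystem implementations through java.util.ServiceLoader, which reads this META-INF/services file, so deleting the two s3 entries means hadoop-common by itself can no longer resolve s3:// or s3n:// URIs. A minimal sketch of that discovery mechanism, using a hypothetical driver class name:

import java.util.ServiceLoader;

import org.apache.hadoop.fs.FileSystem;

// ListFileSystems is a hypothetical name, not part of this commit.
public class ListFileSystems {
  public static void main(String[] args) {
    // ServiceLoader instantiates one implementation per line of
    // META-INF/services/org.apache.hadoop.fs.FileSystem found on the classpath.
    for (FileSystem fs : ServiceLoader.load(FileSystem.class)) {
      System.out.println(fs.getClass().getName());
    }
  }
}

With hadoop-aws absent from the classpath, S3FileSystem and NativeS3FileSystem simply never appear in this loop.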

View File

@@ -327,6 +327,12 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -576,6 +582,12 @@
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk</artifactId>
<version>1.7.2</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.mina</groupId>
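
For downstream modules (illustrative, not part of the diff; this assumes the entries above sit in a dependencyManagement section, which is where pinned versions like the aws-java-sdk 1.7.2 entry normally live): a consuming pom can then omit the version, while the jackson-core exclusion keeps the AWS SDK's bundled Jackson from clashing with the Jackson version Hadoop already manages.

<!-- Hedged sketch of a consumer module's dependency; the version is
     inherited from the managed hadoop-aws entry added above. -->
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-aws</artifactId>
</dependency>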

View File

@@ -15,5 +15,361 @@
limitations under the License.
-->
<FindBugsFilter>
<Match>
<Package name="org.apache.hadoop.security.proto" />
</Match>
<Match>
<Package name="org.apache.hadoop.tools.proto" />
</Match>
<Match>
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
</Match>
<Match>
<Class name="~.*_jsp" />
<Bug pattern="DLS_DEAD_LOCAL_STORE" />
</Match>
<Match>
<Field name="_jspx_dependants" />
<Bug pattern="UWF_UNWRITTEN_FIELD" />
</Match>
<!--
Inconsistent synchronization for Client.Connection.out is
intentional so that a connection can be closed instantly.
-->
<Match>
<Class name="org.apache.hadoop.ipc.Client$Connection" />
<Field name="out" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<!--
A further SaslException during cleanup should be ignored and the
original exception should be re-thrown.
-->
<Match>
<Class name="org.apache.hadoop.security.SaslRpcClient" />
<Bug pattern="DE_MIGHT_IGNORE" />
</Match>
<!--
Ignore cross-site scripting vulnerabilities
-->
<Match>
<Package name="~org.apache.hadoop.mapred.*" />
<Bug code="XSS" />
</Match>
<Match>
<Class name="org.apache.hadoop.mapred.taskdetails_jsp" />
<Bug code="HRS" />
</Match>
<!--
Ignore warnings where a child class has the same name as its
super class. Classes based on the old API shadow names from the
new API. These should go away after HADOOP-1.0.
-->
<Match>
<Class name="~org.apache.hadoop.mapred.*" />
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
</Match>
<!--
Ignore warnings for usage of System.exit. These calls are
required and have been well thought out.
-->
<Match>
<Class name="org.apache.hadoop.mapred.Child$2" />
<Method name="run" />
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="org.apache.hadoop.mapred.JobTracker" />
<Method name="addHostToNodeMapping" />
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="org.apache.hadoop.mapred.Task" />
<Or>
<Method name="done" />
<Method name="commit" />
<Method name="statusUpdate" />
</Or>
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="org.apache.hadoop.mapred.Task$TaskReporter" />
<Method name="run" />
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="org.apache.hadoop.util.ProgramDriver" />
<Method name="driver" />
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="org.apache.hadoop.util.RunJar" />
<Method name="run" />
<Bug pattern="DM_EXIT" />
</Match>
<!--
We need to cast between old and new API objects.
-->
<Match>
<Class name="org.apache.hadoop.mapred.OutputCommitter" />
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
<!--
We intentionally call getName from the inner class.
-->
<Match>
<Class name="org.apache.hadoop.mapred.TaskTracker$MapEventsFetcherThread" />
<Method name="run" />
<Bug pattern="IA_AMBIGUOUS_INVOCATION_OF_INHERITED_OR_OUTER_METHOD" />
</Match>
<Match>
<Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
<Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
</Match>
<!--
Ignoring this warning, as resolving it would need a non-trivial code change.
-->
<Match>
<Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor" />
<Method name="configure" />
<Field name="maxNumItems" />
<Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
</Match>
<!--
Comes from org.apache.jasper.runtime.ResourceInjector. Cannot do much.
-->
<Match>
<Class name="org.apache.hadoop.mapred.jobqueue_005fdetails_jsp" />
<Field name="_jspx_resourceInjector" />
<Bug pattern="SE_BAD_FIELD" />
</Match>
<!--
Storing textInputFormat and then passing it as a parameter. Safe to ignore.
-->
<Match>
<Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob" />
<Method name="createValueAggregatorJob" />
<Bug pattern="DLS_DEAD_STORE_OF_CLASS_LITERAL" />
</Match>
<!--
This can be removed after the upgrade to FindBugs 1.3.8.
-->
<Match>
<Class name="org.apache.hadoop.mapred.lib.db.DBInputFormat" />
<Method name="getSplits" />
<Bug pattern="DLS_DEAD_LOCAL_STORE" />
</Match>
<!--
This is a spurious warning. Just ignore it.
-->
<Match>
<Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
<Field name="kvindex" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<!--
core changes
-->
<Match>
<Class name="~org.apache.hadoop.*" />
<Bug code="MS" />
</Match>
<Match>
<Class name="org.apache.hadoop.fs.FileSystem" />
<Method name="checkPath" />
<Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
</Match>
<Match>
<Class name="org.apache.hadoop.io.Closeable" />
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
</Match>
<Match>
<Class name="org.apache.hadoop.security.AccessControlException" />
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
</Match>
<Match>
<Class name="org.apache.hadoop.util.ProcfsBasedProcessTree" />
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
</Match>
<!--
Streaming, Examples
-->
<Match>
<Class name="org.apache.hadoop.streaming.StreamUtil$TaskId" />
<Bug pattern="URF_UNREAD_FIELD" />
</Match>
<Match>
<Class name="org.apache.hadoop.examples.DBCountPageView" />
<Method name="verify" />
<Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
</Match>
<Match>
<Class name="org.apache.hadoop.examples.ContextFactory" />
<Method name="setAttributes" />
<Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
</Match>
<!--
TFile
-->
<!--
The purpose of skip() is to drain remaining bytes of the chunk-encoded
stream (one chunk at a time). The termination condition is checked by
checkEOF().
-->
<Match>
<Class name="org.apache.hadoop.io.file.tfile.Chunk$ChunkDecoder" />
<Method name="close" />
<Bug pattern="SR_NOT_CHECKED" />
</Match>
<!--
The switch condition fall through is intentional and for performance
purposes.
-->
<Match>
<Class name="org.apache.hadoop.io.file.tfile.Utils" />
<Method name="writeVLong" />
<Bug pattern="SF_SWITCH_FALLTHROUGH" />
</Match>
<Match>
<Class name="org.apache.hadoop.log.EventCounter"/>
<!-- backward compatibility -->
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
</Match>
<Match>
<Class name="org.apache.hadoop.metrics.jvm.EventCounter"/>
<!-- backward compatibility -->
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngineProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtocolInfoProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.protobuf\.IpcConnectionContextProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcHeaderProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ha\.proto\.HAServiceProtocolProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ha\.proto\.ZKFCProtocolProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.security\.proto\.SecurityProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.protobuf\.TestProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.proto\.RefreshCallQueueProtocolProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
<Class name="~org\.apache\.hadoop\.ipc\.proto\.GenericRefreshProtocolProtos.*"/>
</Match>
<!--
Manually checked; FindBugs misses the child thread manually syncing on the parent's intrinsic lock.
-->
<Match>
<Class name="org.apache.hadoop.metrics2.lib.MutableQuantiles" />
<Field name="previousSnapshot" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<!--
The method uses a generic type T that extends two other types
T1 and T2. Findbugs complains of a cast from T1 to T2.
-->
<Match>
<Class name="org.apache.hadoop.fs.DelegationTokenRenewer" />
<Method name="removeRenewAction" />
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
<!-- Inconsistent synchronization flagged by findbugs is not valid. -->
<Match>
<Class name="org.apache.hadoop.ipc.Client$Connection" />
<Field name="in" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<!--
The switch case for INITIATE is expected to fall through to RESPONSE
to process the initial SASL response token included in the INITIATE.
-->
<Match>
<Class name="org.apache.hadoop.ipc.Server$Connection" />
<Method name="processSaslMessage" />
<Bug pattern="SF_SWITCH_FALLTHROUGH" />
</Match>
<!-- Synchronization performed on util.concurrent instance. -->
<Match>
<Class name="org.apache.hadoop.service.AbstractService" />
<Method name="stop" />
<Bug code="JLM" />
</Match>
<Match>
<Class name="org.apache.hadoop.service.AbstractService" />
<Method name="waitForServiceToStop" />
<Bug code="JLM" />
</Match>
<!--
The OpenStack Swift FS module closes streams in a different method
from the one where they are opened.
-->
<Match>
<Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
<Method name="uploadFileAttempt"/>
<Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
</Match>
<Match>
<Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
<Method name="uploadFilePartAttempt"/>
<Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
</Match>
<!-- Code from Maven source; the null value is checked on the callee side. -->
<Match>
<Class name="org.apache.hadoop.util.ComparableVersion$ListItem" />
<Method name="compareTo" />
<Bug code="NP" />
</Match>
<Match>
<Class name="org.apache.hadoop.util.HttpExceptionUtils"/>
<Method name="validateResponse"/>
<Bug pattern="REC_CATCH_EXCEPTION"/>
</Match>
</FindBugsFilter>
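
For reference (an assumption about the build wiring, not shown in this commit): a FindBugs filter file like this one only takes effect when the findbugs-maven-plugin is pointed at it, typically via excludeFilterFile. A hedged sketch, with a placeholder path:

<plugin>
  <groupId>org.codehaus.mojo</groupId>
  <artifactId>findbugs-maven-plugin</artifactId>
  <configuration>
    <!-- Path is a placeholder; each module names its own filter file. -->
    <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
  </configuration>
</plugin>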

View File

@@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.hadoop.fs.s3.S3FileSystem
org.apache.hadoop.fs.s3native.NativeS3FileSystem
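
With these two registrations now shipped in hadoop-aws, s3:// and s3n:// URIs resolve exactly as they did when the classes lived in hadoop-common, provided hadoop-aws is on the classpath. A minimal sketch with placeholder bucket and credentials (S3nExample is a hypothetical name, not part of this commit):

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class S3nExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set("fs.s3n.awsAccessKeyId", "ACCESS_KEY");      // placeholder
    conf.set("fs.s3n.awsSecretAccessKey", "SECRET_KEY");  // placeholder
    // ServiceLoader finds NativeS3FileSystem via the services file above.
    FileSystem fs = FileSystem.get(URI.create("s3n://bucket/"), conf);
    System.out.println(fs.getClass().getName());
  }
}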

View File

@@ -46,15 +46,6 @@ public abstract class S3FileSystemContractBaseTest
    super.tearDown();
  }

  public void testBlockSize() throws Exception {
    Path file = path("/test/hadoop/file");
    long newBlockSize = fs.getDefaultBlockSize(file) * 2;
    fs.getConf().setLong("fs.s3.block.size", newBlockSize);
    createFile(file);
    assertEquals("Double default block size", newBlockSize,
        fs.getFileStatus(file).getBlockSize());
  }

  public void testCanonicalName() throws Exception {
    assertNull("s3 doesn't support security token and shouldn't have canonical name",
        fs.getCanonicalServiceName());
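
The removed testBlockSize exercised the fs.s3.block.size key, and presumably moves to hadoop-aws along with the filesystem itself. For reference, the same knob expressed as configuration; the value shown is the historical 64 MB default, which is an assumption worth checking:

<property>
  <name>fs.s3.block.size</name>
  <!-- 64 MB; doubling this makes S3FileSystem report the doubled value,
       which is what the removed test asserted. -->
  <value>67108864</value>
</property>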

View File

@@ -83,6 +83,12 @@
<scope>compile</scope>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<scope>compile</scope>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-azure</artifactId>