HDFS-2696. svn merge -c 1312068 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1312069 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Eli Collins 2012-04-11 01:56:32 +00:00
parent a9ae92eb76
commit e9180d1e5b
7 changed files with 174 additions and 85 deletions

View File

@ -363,6 +363,8 @@ Release 2.0.0 - UNRELEASED
HDFS-3248. bootstrapStandby repeated twice in hdfs namenode usage message
(Colin Patrick McCabe via todd)
HDFS-2696. Fix the fuse-fds build. (Bruno Mahé via eli)
BREAKDOWN OF HDFS-1623 SUBTASKS
HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

View File

@ -1,63 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Driver build for the HDFS contrib modules: each target fans out to -->
<!-- the per-contrib build.xml files underneath this directory via <subant>. -->
<project name="hadoopcontrib" default="compile" basedir=".">
<!-- In case one of the contrib subdirectories -->
<!-- fails the build or test targets and you cannot fix it: -->
<!-- Then add to fileset: excludes="badcontrib/build.xml" -->
<!-- ====================================================== -->
<!-- Compile contribs. -->
<!-- ====================================================== -->
<target name="compile">
<subant target="compile">
<fileset dir="." includes="*/build.xml"/>
</subant>
</target>
<!-- ====================================================== -->
<!-- Package contrib jars. -->
<!-- ====================================================== -->
<target name="package">
<subant target="package">
<fileset dir="." includes="*/build.xml"/>
</subant>
</target>
<!-- ====================================================== -->
<!-- Test contribs. NOTE: unlike compile/package/clean, the -->
<!-- fileset below includes only fuse-dfs/build.xml, so no  -->
<!-- other contrib's tests are run by this target.          -->
<!-- ====================================================== -->
<target name="test">
<subant target="test">
<fileset dir="." includes="fuse-dfs/build.xml"/>
</subant>
</target>
<!-- ====================================================== -->
<!-- Clean all the contribs. -->
<!-- ====================================================== -->
<target name="clean">
<subant target="clean">
<fileset dir="." includes="*/build.xml"/>
</subant>
</target>
</project>

View File

@ -70,7 +70,7 @@
<property name="ivy.dir" location="ivy" />
<property name="ivysettings.xml" location="${hadoop.root}/ivy/ivysettings.xml"/>
<loadproperties srcfile="${ivy.dir}/libraries.properties"/>
<loadproperties srcfile="${hadoop.root}/ivy/libraries.properties"/>
<loadproperties srcfile="ivy/libraries.properties"/>
<property name="ivy.jar" location="${hadoop.root}/ivy/ivy-${ivy.version}.jar"/>
<property name="ivy_repo_url"
value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />

View File

@ -17,19 +17,19 @@
limitations under the License.
-->
<project name="fuse-dfs" default="jar" xmlns:ivy="antlib:org.apache.ivy.ant">
<project name="fuse-dfs" default="compile" xmlns:ivy="antlib:org.apache.ivy.ant">
<import file="../build-contrib.xml"/>
<import file="build-contrib.xml"/>
<target name="check-libhdfs-exists" if="fusedfs">
<target name="check-libhdfs-exists">
<property name="libhdfs.lib" value="${build.c++.libhdfs}/libhdfs.so"/>
<available file="${libhdfs.lib}" property="libhdfs-exists"/>
<fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile -Dcompile.c++=true -Dlibhdfs=true">
<fail message="libhdfs.so does not exist: ${libhdfs.lib}.">
<condition><not><isset property="libhdfs-exists"/></not></condition>
</fail>
</target>
<target name="compile" if="fusedfs">
<target name="compile">
<exec executable="autoreconf" dir="${basedir}"
searchpath="yes" failonerror="yes">
<arg value="-if"/>
@ -46,24 +46,12 @@
<env key="PACKAGE_VERSION" value="0.1.0"/>
<env key="BUILD_PLATFORM" value="${build.platform}" />
</exec>
<mkdir dir="${build.dir}"/>
<mkdir dir="${build.dir}/test"/>
<!-- Use exec since the copy task doesn't preserve attrs -->
<exec executable="cp" failonerror="true">
<arg line="${hadoop.root}/src/contrib/fuse-dfs/src/fuse_dfs ${build.dir}"/>
</exec>
<exec executable="cp" failonerror="true">
<arg line="${hadoop.root}/src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh ${build.dir}"/>
</exec>
</target>
<target name="jar" />
<target name="package" />
<target name="compile-test" depends="ivy-retrieve-common, check-libhdfs-exists" if="fusedfs">
<target name="compile-test" depends="ivy-retrieve-common, check-libhdfs-exists">
<javac encoding="${build.encoding}"
srcdir="${src.test}"
includes="**/*.java"
@ -73,7 +61,7 @@
</javac>
</target>
<target name="test" depends="compile-test,check-libhdfs-exists" if="fusedfs">
<target name="test" depends="compile-test,check-libhdfs-exists">
<junit showoutput="${test.output}" fork="yes" printsummary="yes"
errorProperty="tests.failed" haltonfailure="no" failureProperty="tests.failed">
<classpath refid="test.classpath"/>

View File

@ -0,0 +1,161 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Maven module for the fuse-dfs contrib. The native bits are only built
     when the optional "fuse" profile (below) is activated; by default this
     is a pom-packaging module that just participates in the reactor. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>3.0.0-SNAPSHOT</version>
    <relativePath>../../../../../hadoop-project</relativePath>
  </parent>
  <groupId>org.apache.hadoop.contrib</groupId>
  <artifactId>hadoop-hdfs-fuse</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <packaging>pom</packaging>

  <name>Apache Hadoop HDFS Fuse</name>
  <description>Apache Hadoop HDFS Fuse</description>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <!-- workaround for filtered/unfiltered resources in same directory -->
        <!-- remove when maven-eclipse-plugin 2.9 is available -->
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-eclipse-plugin</artifactId>
        <version>2.6</version>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <threadCount>1</threadCount>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
        <executions>
          <execution>
            <goals>
              <goal>javadoc</goal>
            </goals>
            <phase>site</phase>
            <configuration>
              <linksource>true</linksource>
              <quiet>true</quiet>
              <verbose>false</verbose>
              <source>${maven.compile.source}</source>
              <charset>${maven.compile.encoding}</charset>
              <groups>
                <group>
                  <!-- was "HttpFs API": copy-paste from the httpfs pom;
                       this module is HDFS Fuse -->
                  <title>HDFS Fuse API</title>
                  <packages>*</packages>
                </group>
              </groups>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-project-info-reports-plugin</artifactId>
        <executions>
          <execution>
            <configuration>
              <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
            </configuration>
            <goals>
              <goal>dependencies</goal>
            </goals>
            <phase>site</phase>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
        <configuration>
          <excludes>
          </excludes>
        </configuration>
      </plugin>
    </plugins>
  </build>

  <profiles>
    <!-- Opt-in profile that drives the native fuse_dfs build through the
         legacy Ant build.xml copied into target/. -->
    <profile>
      <id>fuse</id>
      <activation>
        <activeByDefault>false</activeByDefault>
      </activation>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>prepare-compile-native</id>
                <phase>generate-sources</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <!-- stage the source tree so the Ant build runs in target/ -->
                    <copy toDir="${project.build.directory}/fuse-dfs">
                      <fileset dir="${basedir}"/>
                    </copy>
                  </target>
                </configuration>
              </execution>
              <execution>
                <id>compile-fuse</id>
                <phase>compile</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <ant antfile="${project.build.directory}/fuse-dfs/build.xml"
                        dir="${project.build.directory}/fuse-dfs">
                      <target name="compile"/>
                    </ant>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>

View File

@ -17,5 +17,5 @@
bin_PROGRAMS = fuse_dfs
fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
AM_CFLAGS= -Wall -g
AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/src/c++/libhdfs -I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
AM_LDFLAGS= -L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/../../src/main/native -I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
AM_LDFLAGS= -L$(HADOOP_PREFIX)/../../target/native/target/usr/local/lib -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm -lm

View File

@ -30,6 +30,7 @@
<modules>
<module>hadoop-hdfs</module>
<module>hadoop-hdfs-httpfs</module>
<module>hadoop-hdfs/src/contrib/fuse-dfs</module>
</modules>
<build>