HDFS-3537. Move libhdfs and fuse-dfs source to native subdirectories. Contributed by Colin Patrick McCabe

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1362119 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2012-07-16 16:42:53 +00:00
parent 612443951b
commit 69f7a329ba
62 changed files with 22 additions and 567 deletions

View File

@@ -333,6 +333,9 @@ Branch-2 ( Unreleased changes )
HDFS-3659. Add missing @Override to methods across the hadoop-hdfs
project. (Brandon Li via harsh)
HDFS-3537. Move libhdfs and fuse-dfs source to native subdirectories.
(Colin Patrick McCabe via eli)
OPTIMIZATIONS
HDFS-2982. Startup performance suffers when there are many edit log

View File

@@ -87,15 +87,15 @@ include_directories(
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_BINARY_DIR}
${JNI_INCLUDE_DIRS}
main/native/
main/native/libhdfs
)
set(_FUSE_DFS_VERSION 0.1.0)
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
add_dual_library(hdfs
main/native/hdfs.c
main/native/hdfsJniHelper.c
main/native/libhdfs/hdfs.c
main/native/libhdfs/jni_helper.c
)
target_link_dual_libraries(hdfs
${JAVA_JVM_LIBRARY}
@@ -105,49 +105,46 @@ set(LIBHDFS_VERSION "0.0.0")
set_target_properties(hdfs PROPERTIES
SOVERSION ${LIBHDFS_VERSION})
add_executable(hdfs_test
main/native/hdfs_test.c
add_executable(test_libhdfs_ops
main/native/libhdfs/test/test_libhdfs_ops.c
)
target_link_libraries(hdfs_test
target_link_libraries(test_libhdfs_ops
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_test target/usr/local/bin)
add_executable(hdfs_read
main/native/hdfs_read.c
add_executable(test_libhdfs_read
main/native/libhdfs/test/test_libhdfs_read.c
)
target_link_libraries(hdfs_read
target_link_libraries(test_libhdfs_read
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_read target/usr/local/bin)
add_executable(hdfs_write
main/native/hdfs_write.c
add_executable(test_libhdfs_write
main/native/libhdfs/test/test_libhdfs_write.c
)
target_link_libraries(hdfs_write
target_link_libraries(test_libhdfs_write
hdfs
${JAVA_JVM_LIBRARY}
)
output_directory(hdfs_write target/usr/local/bin)
add_library(native_mini_dfs
main/native/native_mini_dfs.c
main/native/libhdfs/native_mini_dfs.c
)
target_link_libraries(native_mini_dfs
hdfs
)
add_executable(test_native_mini_dfs
main/native/test_native_mini_dfs.c
main/native/libhdfs/test_native_mini_dfs.c
)
target_link_libraries(test_native_mini_dfs
native_mini_dfs
)
add_executable(test_libhdfs_threaded
main/native/test_libhdfs_threaded.c
main/native/libhdfs/test_libhdfs_threaded.c
)
target_link_libraries(test_libhdfs_threaded
hdfs
@@ -155,5 +152,4 @@ target_link_libraries(test_libhdfs_threaded
pthread
)
add_subdirectory(contrib/fuse-dfs/src)
add_subdirectory(main/native/fuse-dfs)
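
For context, the renamed test binaries above (test_libhdfs_ops, test_libhdfs_read, test_libhdfs_write) link against the same public libhdfs API that now lives under main/native/libhdfs. Below is a minimal client sketch of that API; the path, message, and "default" filesystem choice are placeholders picked for illustration, not values taken from this commit.

/*
 * Minimal libhdfs client sketch, roughly what the renamed test binaries
 * test_libhdfs_write and test_libhdfs_read exercise.  Path and message are
 * placeholders; "default" tells libhdfs to use the configured default
 * filesystem.
 */
#include "hdfs.h"          /* public API, now under main/native/libhdfs */

#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
    hdfsFS fs = hdfsConnect("default", 0);
    if (!fs) {
        fprintf(stderr, "hdfsConnect failed\n");
        return EXIT_FAILURE;
    }

    const char *path = "/tmp/libhdfs_demo.txt";    /* placeholder path */
    const char *msg  = "hello from libhdfs\n";

    /* Write a small file. */
    hdfsFile out = hdfsOpenFile(fs, path, O_WRONLY | O_CREAT, 0, 0, 0);
    if (!out) {
        fprintf(stderr, "hdfsOpenFile(%s) for write failed\n", path);
        hdfsDisconnect(fs);
        return EXIT_FAILURE;
    }
    if (hdfsWrite(fs, out, msg, (tSize)strlen(msg)) < 0 || hdfsFlush(fs, out)) {
        fprintf(stderr, "write to %s failed\n", path);
    }
    hdfsCloseFile(fs, out);

    /* Read it back. */
    char buf[64] = { 0 };
    hdfsFile in = hdfsOpenFile(fs, path, O_RDONLY, 0, 0, 0);
    if (in) {
        tSize n = hdfsRead(fs, in, buf, (tSize)(sizeof(buf) - 1));
        if (n > 0) {
            printf("read back %d bytes: %s", (int)n, buf);
        }
        hdfsCloseFile(fs, in);
    }

    hdfsDisconnect(fs);
    return EXIT_SUCCESS;
}

A standalone build of such a sketch would need the same JNI and libhdfs include and link flags that the CMake targets above set up (the hdfs library plus ${JAVA_JVM_LIBRARY}).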

View File

@@ -1,312 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Imported by contrib/*/build.xml files to share generic targets. -->
<project name="hadoopbuildcontrib" xmlns:ivy="antlib:org.apache.ivy.ant">
<property name="name" value="${ant.project.name}"/>
<property name="root" value="${basedir}"/>
<property name="hadoop.root" location="${root}/../../../"/>
<!-- Load all the default properties, and any the user wants -->
<!-- to contribute (without having to type -D or edit this file -->
<property file="${user.home}/${name}.build.properties" />
<property file="${root}/build.properties" />
<property file="${hadoop.root}/build.properties" />
<property name="src.dir" location="${root}/src/java"/>
<property name="src.test" location="${root}/src/test"/>
<property name="src.examples" location="${root}/src/examples"/>
<available file="${src.examples}" type="dir" property="examples.available"/>
<available file="${src.test}" type="dir" property="test.available"/>
<property name="conf.dir" location="${hadoop.root}/conf"/>
<property name="test.junit.output.format" value="plain"/>
<property name="test.output" value="no"/>
<property name="test.timeout" value="900000"/>
<property name="build.dir" location="${hadoop.root}/build/contrib/${name}"/>
<property name="build.webapps.root.dir" value="${hadoop.root}/build/web"/>
<property name="build.webapps" value="${build.webapps.root.dir}/webapps"/>
<property name="build.classes" location="${build.dir}/classes"/>
<!-- NB: sun.arch.data.model is not supported on all platforms -->
<property name="build.platform"
value="${os.name}-${os.arch}-${sun.arch.data.model}"/>
<property name="build.c++.libhdfs" value="${build.dir}/../../c++/${build.platform}/lib"/>
<property name="build.test" location="${build.dir}/test"/>
<property name="build.examples" location="${build.dir}/examples"/>
<property name="hadoop.log.dir" location="${build.dir}/test/logs"/>
<!-- all jars together -->
<property name="javac.deprecation" value="off"/>
<property name="javac.debug" value="on"/>
<property name="build.ivy.lib.dir" value="${hadoop.root}/build/ivy/lib"/>
<property name="javadoc.link"
value="http://java.sun.com/j2se/1.4/docs/api/"/>
<property name="build.encoding" value="ISO-8859-1"/>
<fileset id="lib.jars" dir="${root}" includes="lib/*.jar"/>
<!-- IVY properties set here -->
<property name="ivy.dir" location="ivy" />
<property name="ivysettings.xml" location="${hadoop.root}/ivy/ivysettings.xml"/>
<loadproperties srcfile="${ivy.dir}/libraries.properties"/>
<loadproperties srcfile="ivy/libraries.properties"/>
<property name="ivy.jar" location="${hadoop.root}/ivy/ivy-${ivy.version}.jar"/>
<property name="ivy_repo_url"
value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />
<property name="build.dir" location="build" />
<property name="build.ivy.dir" location="${build.dir}/ivy" />
<property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
<property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
<property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
<!--this is the naming policy for artifacts we want pulled down-->
<property name="ivy.artifact.retrieve.pattern"
value="${ant.project.name}/[conf]/[artifact]-[revision](-[classifier]).[ext]"/>
<!-- the normal classpath -->
<path id="contrib-classpath">
<pathelement location="${build.classes}"/>
<fileset refid="lib.jars"/>
<pathelement location="${hadoop.root}/build/classes"/>
<fileset dir="${hadoop.root}/lib">
<include name="**/*.jar" />
</fileset>
<path refid="${ant.project.name}.common-classpath"/>
<pathelement path="${clover.jar}"/>
</path>
<!-- the unit test classpath -->
<path id="test.classpath">
<pathelement location="${build.test}"/>
<pathelement location="${build.webapps.root.dir}"/>
<pathelement location="${hadoop.root}/build/test/core/classes"/>
<pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
<pathelement location="${hadoop.root}/build/test/mapred/classes"/>
<pathelement location="${hadoop.root}/src/contrib/test"/>
<pathelement location="${conf.dir}"/>
<pathelement location="${hadoop.root}/build"/>
<pathelement location="${build.examples}"/>
<path refid="contrib-classpath"/>
</path>
<!-- to be overridden by sub-projects -->
<target name="check-contrib"/>
<target name="init-contrib"/>
<!-- ====================================================== -->
<!-- Stuff needed by all targets -->
<!-- ====================================================== -->
<target name="init" depends="check-contrib" unless="skip.contrib">
<echo message="contrib: ${name}"/>
<mkdir dir="${build.dir}"/>
<mkdir dir="${build.classes}"/>
<mkdir dir="${build.test}"/>
<mkdir dir="${build.examples}"/>
<mkdir dir="${hadoop.log.dir}"/>
<antcall target="init-contrib"/>
</target>
<!-- ====================================================== -->
<!-- Compile a Hadoop contrib's files -->
<!-- ====================================================== -->
<target name="compile" depends="init, ivy-retrieve-common" unless="skip.contrib">
<echo message="contrib: ${name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
includes="**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
deprecation="${javac.deprecation}">
<classpath refid="contrib-classpath"/>
</javac>
</target>
<!-- ======================================================= -->
<!-- Compile a Hadoop contrib's example files (if available) -->
<!-- ======================================================= -->
<target name="compile-examples" depends="compile" if="examples.available">
<echo message="contrib: ${name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.examples}"
includes="**/*.java"
destdir="${build.examples}"
debug="${javac.debug}">
<classpath refid="contrib-classpath"/>
</javac>
</target>
<!-- ================================================================== -->
<!-- Compile test code -->
<!-- ================================================================== -->
<target name="compile-test" depends="compile-examples" if="test.available">
<echo message="contrib: ${name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.test}"
includes="**/*.java"
destdir="${build.test}"
debug="${javac.debug}">
<classpath refid="test.classpath"/>
</javac>
</target>
<!-- ====================================================== -->
<!-- Make a Hadoop contrib's jar -->
<!-- ====================================================== -->
<target name="jar" depends="compile" unless="skip.contrib">
<echo message="contrib: ${name}"/>
<jar
jarfile="${build.dir}/hadoop-${version}-${name}.jar"
basedir="${build.classes}"
/>
</target>
<!-- ====================================================== -->
<!-- Make a Hadoop contrib's examples jar -->
<!-- ====================================================== -->
<target name="jar-examples" depends="compile-examples"
if="examples.available" unless="skip.contrib">
<echo message="contrib: ${name}"/>
<jar jarfile="${build.dir}/hadoop-${version}-${name}-examples.jar">
<fileset dir="${build.classes}">
</fileset>
<fileset dir="${build.examples}">
</fileset>
</jar>
</target>
<!-- ====================================================== -->
<!-- Package a Hadoop contrib -->
<!-- ====================================================== -->
<target name="package" depends="jar, jar-examples" unless="skip.contrib">
<mkdir dir="${dist.dir}/contrib/${name}"/>
<copy todir="${dist.dir}/contrib/${name}" includeEmptyDirs="false" flatten="true">
<fileset dir="${build.dir}">
<include name="hadoop-${version}-${name}.jar" />
</fileset>
</copy>
</target>
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
<target name="test" depends="compile-test, compile" if="test.available">
<echo message="contrib: ${name}"/>
<delete dir="${hadoop.log.dir}"/>
<mkdir dir="${hadoop.log.dir}"/>
<junit
printsummary="yes" showoutput="${test.output}"
haltonfailure="no" fork="yes" maxmemory="256m"
errorProperty="tests.failed" failureProperty="tests.failed"
timeout="${test.timeout}">
<sysproperty key="test.build.data" value="${build.test}/data"/>
<sysproperty key="build.test" value="${build.test}"/>
<sysproperty key="contrib.name" value="${name}"/>
<!-- requires fork=yes for:
relative File paths to use the specified user.dir
classpath to use build/contrib/*.jar
-->
<sysproperty key="java.net.preferIPv4Stack" value="true"/>
<sysproperty key="user.dir" value="${build.test}/data"/>
<sysproperty key="fs.default.name" value="${fs.default.name}"/>
<sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
<sysproperty key="hadoop.log.dir" value="${hadoop.log.dir}"/>
<sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
<sysproperty key="taskcontroller-user" value="${taskcontroller-user}"/>
<classpath refid="test.classpath"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${build.test}" unless="testcase">
<fileset dir="${src.test}"
includes="**/Test*.java" excludes="**/${test.exclude}.java" />
</batchtest>
<batchtest todir="${build.test}" if="testcase">
<fileset dir="${src.test}" includes="**/${testcase}.java"/>
</batchtest>
</junit>
<fail if="tests.failed">Tests failed!</fail>
</target>
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
<target name="clean">
<echo message="contrib: ${name}"/>
<delete dir="${build.dir}"/>
</target>
<target name="ivy-probe-antlib" >
<condition property="ivy.found">
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
</condition>
</target>
<target name="ivy-download" description="To download ivy " unless="offline">
<get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
</target>
<target name="ivy-init-antlib" depends="ivy-download,ivy-probe-antlib" unless="ivy.found">
<typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
loaderRef="ivyLoader">
<classpath>
<pathelement location="${ivy.jar}"/>
</classpath>
</typedef>
<fail >
<condition >
<not>
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
</not>
</condition>
You need Apache Ivy 2.0 or later from http://ant.apache.org/
It could not be loaded from ${ivy_repo_url}
</fail>
</target>
<target name="ivy-init" depends="ivy-init-antlib">
<ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}"/>
</target>
<target name="ivy-resolve-common" depends="ivy-init">
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
</target>
<target name="ivy-retrieve-common" depends="ivy-resolve-common"
description="Retrieve Ivy-managed artifacts for the compile/test configurations">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" sync="true" />
<ivy:cachepath pathid="${ant.project.name}.common-classpath" conf="common" />
</target>
</project>

View File

@@ -1,87 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="fuse-dfs" default="compile" xmlns:ivy="antlib:org.apache.ivy.ant">
<import file="build-contrib.xml"/>
<target name="check-libhdfs-exists">
<property name="libhdfs.lib" value="${build.c++.libhdfs}/libhdfs.so"/>
<available file="${libhdfs.lib}" property="libhdfs-exists"/>
<fail message="libhdfs.so does not exist: ${libhdfs.lib}.">
<condition><not><isset property="libhdfs-exists"/></not></condition>
</fail>
</target>
<target name="compile">
<exec executable="autoreconf" dir="${basedir}"
searchpath="yes" failonerror="yes">
<arg value="-if"/>
</exec>
<exec executable="${basedir}/configure" dir="${basedir}"
failonerror="yes">
</exec>
<exec executable="make" failonerror="true">
<env key="OS_NAME" value="${os.name}"/>
<env key="OS_ARCH" value="${os.arch}"/>
<env key="HADOOP_PREFIX" value="${hadoop.root}"/>
<env key="PACKAGE_VERSION" value="0.1.0"/>
<env key="BUILD_PLATFORM" value="${build.platform}" />
</exec>
</target>
<target name="jar" />
<target name="package" />
<target name="compile-test" depends="ivy-retrieve-common, check-libhdfs-exists">
<javac encoding="${build.encoding}"
srcdir="${src.test}"
includes="**/*.java"
destdir="${build.test}"
debug="${javac.debug}">
<classpath refid="test.classpath"/>
</javac>
</target>
<target name="test" depends="compile-test,check-libhdfs-exists">
<junit showoutput="${test.output}" fork="yes" printsummary="yes"
errorProperty="tests.failed" haltonfailure="no" failureProperty="tests.failed">
<classpath refid="test.classpath"/>
<sysproperty key="test.build.data" value="${build.test}/data"/>
<sysproperty key="build.test" value="${build.test}"/>
<sysproperty key="user.dir" value="${build.test}/data"/>
<sysproperty key="hadoop.log.dir" value="${hadoop.log.dir}"/>
<sysproperty key="test.src.dir" value="${test.src.dir}"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${build.test}" unless="testcase">
<fileset dir="${src.test}">
<include name="**/Test*.java"/>
</fileset>
</batchtest>
<batchtest todir="${build.test}" if="testcase">
<fileset dir="${src.test}">
<include name="**/${testcase}.java"/>
</fileset>
</batchtest>
</junit>
<fail if="tests.failed">Tests failed!</fail>
</target>
</project>

View File

@@ -1,18 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
thriftstyle : $(XBUILT_SOURCES)

View File

@@ -1,51 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
ifneq ($$(XBUILT_SOURCES),)
XBUILT_SOURCES := $$(XBUILT_SOURCES) $$(XTARGET)
else
XBUILT_SOURCES := $$(XTARGET)
endif
showvars:
@echo BUILD_SOURCES = $(BUILT_SOURCES)
@echo XBUILTSOURCES = $(XBUILT_SOURCES)
@echo DEFS = $(DEFS)
@echo CXXFLAGS = $(CXXFLAGS)
@echo AM_CXXFLAGS = $(AM_CXXFLAGS)
@echo CPPFLAGS = $(CPPFLAGS)
@echo AM_CPPFLAGS = $(AM_CPPFLAGS)
@echo LDFLAGS = $(LDFLAGS)
@echo AM_LDFLAGS = $(AM_LDFLAGS)
@echo LDADD = $(LDADD)
@echo LIBS = $(LIBS)
@echo EXTERNAL_LIBS = $(EXTERNAL_LIBS)
@echo EXTERNAL_PATH = $(EXTERNAL_PATH)
@echo MAKE = $(MAKE)
@echo MAKE_FLAGS = $(MAKE_FLAGS)
@echo AM_MAKEFLAGS = $(AM_MAKEFLAGS)
@echo top_builddir = $(top_builddir)
@echo top_srcdir = $(top_srcdir)
@echo srcdir = $(srcdir)
@echo PHPVAL = $(PHPVAL)
@echo PHPCONFIGDIR = $(PHPCONFIGDIR)
@echo PHPCONFIGINCLUDEDIR = $(PHPCONFIGINCLUDEDIR)
@echo PHPCONFIGINCLUDES = $(PHPCONFIGINCLUDES)
@echo PHPCONFIGLDFLAGS = $(PHPCONFIGLDFLAGS)
@echo PHPCONFIGLIBS = $(PHPCONFIGLIBS)
clean-common:
rm -rf gen-*

View File

@@ -1,71 +0,0 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<ivy-module version="1.0" xmlns:m="http://ant.apache.org/ivy/maven">
<info organisation="org.apache.hadoop" module="${ant.project.name}">
<license name="Apache 2.0"/>
<ivyauthor name="Apache Hadoop Team" url="http://hadoop.apache.org"/>
<description>
FUSE plugin for HDFS
</description>
</info>
<configurations defaultconfmapping="default">
<!--these match the Maven configurations-->
<conf name="default" extends="master,runtime"/>
<conf name="master" description="contains the artifact but no dependencies"/>
<conf name="runtime" description="runtime but not the artifact" />
<conf name="common" visibility="private"
extends="runtime"
description="artifacts needed to compile/test the application"/>
<conf name="test" visibility="private" extends="runtime"/>
</configurations>
<publications>
<!--get the artifact from our module name-->
<artifact conf="master"/>
</publications>
<dependencies>
<dependency org="org.apache.hadoop"
name="hadoop-common"
rev="${hadoop-common.version}"
conf="common->default"/>
<dependency org="org.apache.hadoop"
name="hadoop-common"
rev="${hadoop-common.version}"
conf="common->default">
<artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests"/>
</dependency>
<dependency org="log4j"
name="log4j"
rev="${log4j.version}"
conf="common->master">
<exclude org="com.sun.jdmk"/>
<exclude org="com.sun.jmx"/>
<exclude org="javax.jms"/>
</dependency>
<dependency org="commons-logging"
name="commons-logging"
rev="${commons-logging.version}"
conf="common->master"/>
<dependency org="junit"
name="junit"
rev="${junit.version}"
conf="common->master"/>
</dependencies>
</ivy-module>

View File

@@ -1,5 +0,0 @@
#This properties file lists the versions of the various artifacts used by streaming.
#It drives ivy and the generation of a maven POM
#Please list the dependencies name with version if they are different from the ones
#listed in the global libraries.properties file (in alphabetical order)

View File

@@ -17,7 +17,7 @@
*/
#include "hdfs.h"
#include "hdfsJniHelper.h"
#include "jni_helper.h"
#include <stdio.h>
#include <string.h>

View File

@@ -17,7 +17,7 @@
*/
#include "config.h"
#include "hdfsJniHelper.h"
#include "jni_helper.h"
#include <stdio.h>
#include <string.h>

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
#include "hdfsJniHelper.h"
#include "jni_helper.h"
#include "native_mini_dfs.h"
#include <errno.h>
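
The include renames in these last hunks (hdfsJniHelper.h becoming jni_helper.h) are internal to libhdfs; test programs such as test_native_mini_dfs and test_libhdfs_threaded keep using the public headers. A rough sketch of that in-process mini-cluster test pattern follows; the nmdCreate / nmdWaitClusterUp / nmdGetNameNodePort / nmdShutdown / nmdFree entry points and the doFormat field are recalled from native_mini_dfs.h of this era and should be treated as assumptions to verify against the header.

/* Sketch of the in-process mini-cluster test pattern (assumed API shapes;
 * check main/native/libhdfs/native_mini_dfs.h before relying on them). */
#include "hdfs.h"
#include "native_mini_dfs.h"

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    struct NativeMiniDfsConf conf = {
        .doFormat = 1,              /* format the test cluster's storage */
    };
    struct NativeMiniDfsCluster *cluster = nmdCreate(&conf);
    if (!cluster || nmdWaitClusterUp(cluster)) {
        fprintf(stderr, "failed to start the native mini DFS cluster\n");
        return EXIT_FAILURE;
    }

    int ret = EXIT_FAILURE;
    hdfsFS fs = hdfsConnect("localhost", (tPort)nmdGetNameNodePort(cluster));
    if (fs) {
        /* ... exercise libhdfs calls against the test cluster here ... */
        hdfsDisconnect(fs);
        ret = EXIT_SUCCESS;
    }

    nmdShutdown(cluster);
    nmdFree(cluster);
    return ret;
}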