HADOOP-8562. Merge r1453486 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1485845 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
35dc9545e1
commit
a1235b5ba9
67
BUILDING.txt
67
BUILDING.txt
|
@ -138,3 +138,70 @@ Create a local staging version of the website (in /tmp/hadoop-site)
|
||||||
$ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
|
$ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
|
||||||
|
|
||||||
----------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
Building on Windows
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------
|
||||||
|
Requirements:
|
||||||
|
|
||||||
|
* Windows System
|
||||||
|
* JDK 1.6
|
||||||
|
* Maven 3.0
|
||||||
|
* Findbugs 1.3.9 (if running findbugs)
|
||||||
|
* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS)
|
||||||
|
* Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip
|
||||||
|
* Windows SDK or Visual Studio 2010 Professional
|
||||||
|
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
|
||||||
|
|
||||||
|
If using Visual Studio, it must be Visual Studio 2010 Professional (not 2012).
|
||||||
|
Do not use Visual Studio Express. It does not support compiling for 64-bit,
|
||||||
|
which is problematic if running a 64-bit system. The Windows SDK is free to
|
||||||
|
download here:
|
||||||
|
|
||||||
|
http://www.microsoft.com/en-us/download/details.aspx?id=8279
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------
|
||||||
|
Building:
|
||||||
|
|
||||||
|
Keep the source code tree in a short path to avoid running into problems related
|
||||||
|
to Windows maximum path length limitation. (For example, C:\hdc).
|
||||||
|
|
||||||
|
Run builds from a Windows SDK Command Prompt. (Start, All Programs,
|
||||||
|
Microsoft Windows SDK v7.1, Windows SDK 7.1 Command Prompt.)
|
||||||
|
|
||||||
|
JAVA_HOME must be set, and the path must not contain spaces. If the full path
|
||||||
|
would contain spaces, then use the Windows short path instead.
|
||||||
|
|
||||||
|
You must set the Platform environment variable to either x64 or Win32 depending
|
||||||
|
on whether you're running a 64-bit or 32-bit system. Note that this is
|
||||||
|
case-sensitive. It must be "Platform", not "PLATFORM" or "platform".
|
||||||
|
Environment variables on Windows are usually case-insensitive, but Maven treats
|
||||||
|
them as case-sensitive. Failure to set this environment variable correctly will
|
||||||
|
cause msbuild to fail while building the native code in hadoop-common.
|
||||||
|
|
||||||
|
set Platform=x64 (when building on a 64-bit system)
|
||||||
|
set Platform=Win32 (when building on a 32-bit system)
|
||||||
|
|
||||||
|
Several tests require that the user must have the Create Symbolic Links
|
||||||
|
privilege.
|
||||||
|
|
||||||
|
All Maven goals are the same as described above, with the addition of profile
|
||||||
|
-Pnative-win to trigger building Windows native components. The native
|
||||||
|
components are required (not optional) on Windows. For example:
|
||||||
|
|
||||||
|
* Run tests : mvn -Pnative-win test
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------
|
||||||
|
Building distributions:
|
||||||
|
|
||||||
|
Create binary distribution with native code and with documentation:
|
||||||
|
|
||||||
|
$ mvn package -Pdist,native-win,docs -DskipTests -Dtar
|
||||||
|
|
||||||
|
Create source distribution:
|
||||||
|
|
||||||
|
$ mvn package -Pnative-win,src -DskipTests
|
||||||
|
|
||||||
|
Create source and binary distributions with native code and documentation:
|
||||||
|
|
||||||
|
$ mvn package -Pdist,native-win,docs,src -DskipTests -Dtar
|
||||||
|
|
|
@ -26,6 +26,9 @@
|
||||||
<outputDirectory>/bin</outputDirectory>
|
<outputDirectory>/bin</outputDirectory>
|
||||||
<excludes>
|
<excludes>
|
||||||
<exclude>*.sh</exclude>
|
<exclude>*.sh</exclude>
|
||||||
|
<exclude>*-config.cmd</exclude>
|
||||||
|
<exclude>start-*.cmd</exclude>
|
||||||
|
<exclude>stop-*.cmd</exclude>
|
||||||
</excludes>
|
</excludes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
@ -38,6 +41,7 @@
|
||||||
<outputDirectory>/libexec</outputDirectory>
|
<outputDirectory>/libexec</outputDirectory>
|
||||||
<includes>
|
<includes>
|
||||||
<include>*-config.sh</include>
|
<include>*-config.sh</include>
|
||||||
|
<include>*-config.cmd</include>
|
||||||
</includes>
|
</includes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
@ -46,9 +50,13 @@
|
||||||
<outputDirectory>/sbin</outputDirectory>
|
<outputDirectory>/sbin</outputDirectory>
|
||||||
<includes>
|
<includes>
|
||||||
<include>*.sh</include>
|
<include>*.sh</include>
|
||||||
|
<include>*.cmd</include>
|
||||||
</includes>
|
</includes>
|
||||||
<excludes>
|
<excludes>
|
||||||
<exclude>hadoop-config.sh</exclude>
|
<exclude>hadoop-config.sh</exclude>
|
||||||
|
<exclude>hadoop.cmd</exclude>
|
||||||
|
<exclude>hdfs.cmd</exclude>
|
||||||
|
<exclude>hadoop-config.cmd</exclude>
|
||||||
</excludes>
|
</excludes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
|
|
@ -33,6 +33,7 @@
|
||||||
<outputDirectory>bin</outputDirectory>
|
<outputDirectory>bin</outputDirectory>
|
||||||
<includes>
|
<includes>
|
||||||
<include>yarn</include>
|
<include>yarn</include>
|
||||||
|
<include>yarn.cmd</include>
|
||||||
</includes>
|
</includes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
@ -41,6 +42,7 @@
|
||||||
<outputDirectory>libexec</outputDirectory>
|
<outputDirectory>libexec</outputDirectory>
|
||||||
<includes>
|
<includes>
|
||||||
<include>yarn-config.sh</include>
|
<include>yarn-config.sh</include>
|
||||||
|
<include>yarn-config.cmd</include>
|
||||||
</includes>
|
</includes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
@ -52,6 +54,8 @@
|
||||||
<include>yarn-daemons.sh</include>
|
<include>yarn-daemons.sh</include>
|
||||||
<include>start-yarn.sh</include>
|
<include>start-yarn.sh</include>
|
||||||
<include>stop-yarn.sh</include>
|
<include>stop-yarn.sh</include>
|
||||||
|
<include>start-yarn.cmd</include>
|
||||||
|
<include>stop-yarn.cmd</include>
|
||||||
</includes>
|
</includes>
|
||||||
<fileMode>0755</fileMode>
|
<fileMode>0755</fileMode>
|
||||||
</fileSet>
|
</fileSet>
|
||||||
|
|
|
@ -0,0 +1,111 @@
|
||||||
|
branch-trunk-win changes - unreleased
|
||||||
|
|
||||||
|
HADOOP-8924. Hadoop Common creating package-info.java must not depend on sh.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8945. Merge winutils from branch-1-win to branch-trunk-win.
|
||||||
|
(Bikas Saha, Chuan Liu, Giridharan Kesavan, Ivan Mitic, and Steve Maine
|
||||||
|
ported by Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8946. winutils: compile codebase during Maven build on
|
||||||
|
branch-trunk-win. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8947. Merge FileUtil and Shell changes from branch-1-win to
|
||||||
|
branch-trunk-win to enable initial test pass. (Raja Aluri, Davio Lao,
|
||||||
|
Sumadhur Reddy Bolli, Ahmed El Baz, Kanna Karanam, Chuan Liu,
|
||||||
|
Ivan Mitic, Chris Nauroth, and Bikas Saha via suresh)
|
||||||
|
|
||||||
|
HADOOP-8954. "stat" executable not found on Windows. (Bikas Saha, Ivan Mitic
|
||||||
|
ported by Chris Narouth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8959. TestUserGroupInformation fails on Windows due to "id" executable
|
||||||
|
not found. (Bikas Saha, Ivan Mitic, ported by Chris Narouth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8955. "chmod" executable not found on Windows.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8960. TestMetricsServlet fails on Windows. (Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HADOOP-8961. GenericOptionsParser URI parsing failure on Windows.
|
||||||
|
(Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HADOOP-8949. Remove FileUtil.CygPathCommand dead code. (Chris Nauroth via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-8956. FileSystem.primitiveMkdir failures on Windows cause multiple
|
||||||
|
test suites to fail. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8978. TestTrash fails on Windows. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8979. TestHttpServer fails on Windows. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-8953. Shell PathData parsing failures on Windows. (Arpit Agarwal via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-8975. TestFileContextResolveAfs fails on Windows. (Chris Nauroth via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-8977. Multiple FsShell test failures on Windows. (Chris Nauroth via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-9005. Merge hadoop cmd line scripts from branch-1-win. (David Lao,
|
||||||
|
Bikas Saha, Lauren Yang, Chuan Liu, Thejas M Nair and Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HADOOP-9008. Building hadoop tarball fails on Windows. (Chris Nauroth via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-9011. saveVersion.py does not include branch in version annotation.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9110. winutils ls off-by-one error indexing MONTHS array can cause
|
||||||
|
access violation. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9056. Build native library on Windows. (Chuan Liu, Arpit Agarwal via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HADOOP-9144. Fix findbugs warnings. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9081. Add TestWinUtils. (Chuan Liu, Ivan Mitic, Chris Nauroth,
|
||||||
|
and Bikas Saha via suresh)
|
||||||
|
|
||||||
|
HADOOP-9146. Fix sticky bit regression on branch-trunk-win.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9266. Fix javac, findbugs, and release audit warnings on
|
||||||
|
branch-trunk-win. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9270. Remove a stale java comment from FileUtil. (Chris Nauroth via
|
||||||
|
szetszwo)
|
||||||
|
|
||||||
|
HADOOP-9271. Revert Python build scripts from branch-trunk-win.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9313. Remove spurious mkdir from hadoop-config.cmd.
|
||||||
|
(Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HADOOP-9309. Test failures on Windows due to UnsatisfiedLinkError
|
||||||
|
in NativeCodeLoader#buildSupportsSnappy. (Arpit Agarwal via suresh)
|
||||||
|
|
||||||
|
HADOOP-9347. Add instructions to BUILDING.txt describing how to
|
||||||
|
build on Windows. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9348. Address TODO in winutils to add more command line usage
|
||||||
|
and examples. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9354. Windows native project files missing license headers.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9356. Remove remaining references to cygwin/cygpath from scripts.
|
||||||
|
(Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HADOOP-9232. JniBasedUnixGroupsMappingWithFallback fails on Windows
|
||||||
|
with UnsatisfiedLinkError. (Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HADOOP-9368. Add timeouts to new tests in branch-trunk-win.
|
||||||
|
(Arpit Agarwal via suresh)
|
||||||
|
|
||||||
|
Patch equivalent to trunk committed to branch-trunk-win
|
||||||
|
|
||||||
|
HADOOP-8924. Add maven plugin alternative to shell script to save
|
||||||
|
package-info.java. (Chris Nauroth via suresh)
|
||||||
|
|
|
@ -503,15 +503,15 @@ Release 2.0.3-alpha - 2013-02-06
|
||||||
HADOOP-9124. SortedMapWritable violates contract of Map interface for
|
HADOOP-9124. SortedMapWritable violates contract of Map interface for
|
||||||
equals() and hashCode(). (Surenkumar Nihalani via tomwhite)
|
equals() and hashCode(). (Surenkumar Nihalani via tomwhite)
|
||||||
|
|
||||||
|
HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
|
||||||
|
HarFileSystem. (Chris Nauroth via szetszwo)
|
||||||
|
|
||||||
HADOOP-9252. In StringUtils, humanReadableInt(..) has a race condition and
|
HADOOP-9252. In StringUtils, humanReadableInt(..) has a race condition and
|
||||||
the synchronization of limitDecimalTo2(double) can be avoided. (szetszwo)
|
the synchronization of limitDecimalTo2(double) can be avoided. (szetszwo)
|
||||||
|
|
||||||
HADOOP-9260. Hadoop version may be not correct when starting name node or
|
HADOOP-9260. Hadoop version may be not correct when starting name node or
|
||||||
data node. (Chris Nauroth via jlowe)
|
data node. (Chris Nauroth via jlowe)
|
||||||
|
|
||||||
HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
|
|
||||||
HarFileSystem. (Chris Nauroth via szetszwo)
|
|
||||||
|
|
||||||
HADOOP-9289. FsShell rm -f fails for non-matching globs. (Daryn Sharp via
|
HADOOP-9289. FsShell rm -f fails for non-matching globs. (Daryn Sharp via
|
||||||
suresh)
|
suresh)
|
||||||
|
|
||||||
|
|
|
@ -415,6 +415,7 @@
|
||||||
<configuration>
|
<configuration>
|
||||||
<excludes>
|
<excludes>
|
||||||
<exclude>CHANGES.txt</exclude>
|
<exclude>CHANGES.txt</exclude>
|
||||||
|
<exclude>CHANGES.branch-trunk-win.txt</exclude>
|
||||||
<exclude>.idea/**</exclude>
|
<exclude>.idea/**</exclude>
|
||||||
<exclude>src/main/conf/*</exclude>
|
<exclude>src/main/conf/*</exclude>
|
||||||
<exclude>src/main/docs/**</exclude>
|
<exclude>src/main/docs/**</exclude>
|
||||||
|
@ -459,6 +460,28 @@
|
||||||
</properties>
|
</properties>
|
||||||
<build>
|
<build>
|
||||||
<plugins>
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-enforcer-plugin</artifactId>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<id>enforce-os</id>
|
||||||
|
<goals>
|
||||||
|
<goal>enforce</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<rules>
|
||||||
|
<requireOS>
|
||||||
|
<family>mac</family>
|
||||||
|
<family>unix</family>
|
||||||
|
<message>native build only supported on Mac or Unix</message>
|
||||||
|
</requireOS>
|
||||||
|
</rules>
|
||||||
|
<fail>true</fail>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.codehaus.mojo</groupId>
|
<groupId>org.codehaus.mojo</groupId>
|
||||||
<artifactId>native-maven-plugin</artifactId>
|
<artifactId>native-maven-plugin</artifactId>
|
||||||
|
@ -531,5 +554,103 @@
|
||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
</profile>
|
</profile>
|
||||||
|
|
||||||
|
<profile>
|
||||||
|
<id>native-win</id>
|
||||||
|
<activation>
|
||||||
|
<activeByDefault>false</activeByDefault>
|
||||||
|
</activation>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-enforcer-plugin</artifactId>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<id>enforce-os</id>
|
||||||
|
<goals>
|
||||||
|
<goal>enforce</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<rules>
|
||||||
|
<requireOS>
|
||||||
|
<family>windows</family>
|
||||||
|
<message>native-win build only supported on Windows</message>
|
||||||
|
</requireOS>
|
||||||
|
</rules>
|
||||||
|
<fail>true</fail>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.codehaus.mojo</groupId>
|
||||||
|
<artifactId>native-maven-plugin</artifactId>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<phase>compile</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>javah</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<javahPath>${env.JAVA_HOME}/bin/javah</javahPath>
|
||||||
|
<javahClassNames>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.zlib.ZlibCompressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.zlib.ZlibDecompressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsMapping</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.nativeio.NativeIO</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.snappy.SnappyCompressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
|
||||||
|
<javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
|
||||||
|
</javahClassNames>
|
||||||
|
<javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.codehaus.mojo</groupId>
|
||||||
|
<artifactId>exec-maven-plugin</artifactId>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<id>compile-ms-winutils</id>
|
||||||
|
<phase>compile</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>exec</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<executable>msbuild</executable>
|
||||||
|
<arguments>
|
||||||
|
<argument>${basedir}/src/main/winutils/winutils.sln</argument>
|
||||||
|
<argument>/nologo</argument>
|
||||||
|
<argument>/p:Configuration=Release</argument>
|
||||||
|
<argument>/p:OutDir=${project.build.directory}/bin/</argument>
|
||||||
|
</arguments>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
<execution>
|
||||||
|
<id>compile-ms-native-dll</id>
|
||||||
|
<phase>compile</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>exec</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<executable>msbuild</executable>
|
||||||
|
<arguments>
|
||||||
|
<argument>${basedir}/src/main/native/native.sln</argument>
|
||||||
|
<argument>/nologo</argument>
|
||||||
|
<argument>/p:Configuration=Release</argument>
|
||||||
|
<argument>/p:OutDir=${project.build.directory}/bin/</argument>
|
||||||
|
</arguments>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
</profile>
|
||||||
</profiles>
|
</profiles>
|
||||||
</project>
|
</project>
|
||||||
|
|
|
@ -91,9 +91,6 @@ case $COMMAND in
|
||||||
;;
|
;;
|
||||||
|
|
||||||
classpath)
|
classpath)
|
||||||
if $cygwin; then
|
|
||||||
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
|
|
||||||
fi
|
|
||||||
echo $CLASSPATH
|
echo $CLASSPATH
|
||||||
exit
|
exit
|
||||||
;;
|
;;
|
||||||
|
@ -132,9 +129,6 @@ case $COMMAND in
|
||||||
#make sure security appender is turned off
|
#make sure security appender is turned off
|
||||||
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
|
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
|
||||||
|
|
||||||
if $cygwin; then
|
|
||||||
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
|
|
||||||
fi
|
|
||||||
export CLASSPATH=$CLASSPATH
|
export CLASSPATH=$CLASSPATH
|
||||||
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
|
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
|
||||||
;;
|
;;
|
||||||
|
|
|
@ -0,0 +1,292 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
@rem included in all the hadoop scripts with source command
|
||||||
|
@rem should not be executable directly
|
||||||
|
@rem also should not be passed any arguments, since we need original %*
|
||||||
|
|
||||||
|
if not defined HADOOP_COMMON_DIR (
|
||||||
|
set HADOOP_COMMON_DIR=share\hadoop\common
|
||||||
|
)
|
||||||
|
if not defined HADOOP_COMMON_LIB_JARS_DIR (
|
||||||
|
set HADOOP_COMMON_LIB_JARS_DIR=share\hadoop\common\lib
|
||||||
|
)
|
||||||
|
if not defined HADOOP_COMMON_LIB_NATIVE_DIR (
|
||||||
|
set HADOOP_COMMON_LIB_NATIVE_DIR=lib\native
|
||||||
|
)
|
||||||
|
if not defined HDFS_DIR (
|
||||||
|
set HDFS_DIR=share\hadoop\hdfs
|
||||||
|
)
|
||||||
|
if not defined HDFS_LIB_JARS_DIR (
|
||||||
|
set HDFS_LIB_JARS_DIR=share\hadoop\hdfs\lib
|
||||||
|
)
|
||||||
|
if not defined YARN_DIR (
|
||||||
|
set YARN_DIR=share\hadoop\yarn
|
||||||
|
)
|
||||||
|
if not defined YARN_LIB_JARS_DIR (
|
||||||
|
set YARN_LIB_JARS_DIR=share\hadoop\yarn\lib
|
||||||
|
)
|
||||||
|
if not defined MAPRED_DIR (
|
||||||
|
set MAPRED_DIR=share\hadoop\mapreduce
|
||||||
|
)
|
||||||
|
if not defined MAPRED_LIB_JARS_DIR (
|
||||||
|
set MAPRED_LIB_JARS_DIR=share\hadoop\mapreduce\lib
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem the root of the Hadoop installation
|
||||||
|
set HADOOP_HOME=%~dp0
|
||||||
|
for %%i in (%HADOOP_HOME%.) do (
|
||||||
|
set HADOOP_HOME=%%~dpi
|
||||||
|
)
|
||||||
|
if "%HADOOP_HOME:~-1%" == "\" (
|
||||||
|
set HADOOP_HOME=%HADOOP_HOME:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
if not exist %HADOOP_HOME%\share\hadoop\common\hadoop-common-*.jar (
|
||||||
|
@echo +================================================================+
|
||||||
|
@echo ^| Error: HADOOP_HOME is not set correctly ^|
|
||||||
|
@echo +----------------------------------------------------------------+
|
||||||
|
@echo ^| Please set your HADOOP_HOME variable to the absolute path of ^|
|
||||||
|
@echo ^| the directory that contains the hadoop distribution ^|
|
||||||
|
@echo +================================================================+
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
set HADOOP_CONF_DIR=%HADOOP_HOME%\etc\hadoop
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem Allow alternate conf dir location.
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if "%1" == "--config" (
|
||||||
|
set HADOOP_CONF_DIR=%2
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem check to see it is specified whether to use the slaves or the
|
||||||
|
@rem masters file
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if "%1" == "--hosts" (
|
||||||
|
set HADOOP_SLAVES=%HADOOP_CONF_DIR%\%2
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_CONF_DIR%\hadoop-env.cmd (
|
||||||
|
call %HADOOP_CONF_DIR%\hadoop-env.cmd
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem setup java environment variables
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined JAVA_HOME (
|
||||||
|
echo Error: JAVA_HOME is not set.
|
||||||
|
goto :eof
|
||||||
|
)
|
||||||
|
|
||||||
|
if not exist %JAVA_HOME%\bin\java.exe (
|
||||||
|
echo Error: JAVA_HOME is incorrectly set.
|
||||||
|
echo Please update %HADOOP_HOME%\conf\hadoop-env.cmd
|
||||||
|
goto :eof
|
||||||
|
)
|
||||||
|
|
||||||
|
set JAVA=%JAVA_HOME%\bin\java
|
||||||
|
@rem some Java parameters
|
||||||
|
set JAVA_HEAP_MAX=-Xmx1000m
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem check envvars which might override default args
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if defined HADOOP_HEAPSIZE (
|
||||||
|
set JAVA_HEAP_MAX=-Xmx%HADOOP_HEAPSIZE%m
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem CLASSPATH initially contains %HADOOP_CONF_DIR%
|
||||||
|
@rem
|
||||||
|
|
||||||
|
set CLASSPATH=%HADOOP_CONF_DIR%
|
||||||
|
|
||||||
|
if not defined HADOOP_COMMON_HOME (
|
||||||
|
if exist %HADOOP_HOME%\share\hadoop\common (
|
||||||
|
set HADOOP_COMMON_HOME=%HADOOP_HOME%
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem for releases, add core hadoop jar & webapps to CLASSPATH
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if exist %HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\webapps (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_COMMON_HOME%\%HADOOP_COMMON_LIB_JARS_DIR% (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_LIB_JARS_DIR%\*
|
||||||
|
)
|
||||||
|
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\*
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem add user-specified CLASSPATH last
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if defined HADOOP_CLASSPATH (
|
||||||
|
if defined HADOOP_USER_CLASSPATH_FIRST (
|
||||||
|
set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
|
||||||
|
) else (
|
||||||
|
set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem default log directory % file
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_LOG_DIR (
|
||||||
|
set HADOOP_LOG_DIR=%HADOOP_HOME%\logs
|
||||||
|
)
|
||||||
|
|
||||||
|
if not defined HADOOP_LOGFILE (
|
||||||
|
set HADOOP_LOGFILE=hadoop.log
|
||||||
|
)
|
||||||
|
|
||||||
|
if not defined HADOOP_ROOT_LOGGER (
|
||||||
|
set HADOOP_ROOT_LOGGER=INFO,console
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem default policy file for service-level authorization
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_POLICYFILE (
|
||||||
|
set HADOOP_POLICYFILE=hadoop-policy.xml
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem Determine the JAVA_PLATFORM
|
||||||
|
@rem
|
||||||
|
|
||||||
|
for /f "delims=" %%A in ('%JAVA% -Xmx32m %HADOOP_JAVA_PLATFORM_OPTS% -classpath "%CLASSPATH%" org.apache.hadoop.util.PlatformName') do set JAVA_PLATFORM=%%A
|
||||||
|
@rem replace space with underscore
|
||||||
|
set JAVA_PLATFORM=%JAVA_PLATFORM: =_%
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem setup 'java.library.path' for native hadoop code if necessary
|
||||||
|
@rem
|
||||||
|
|
||||||
|
@rem Check if we're running hadoop directly from the build
|
||||||
|
set JAVA_LIBRARY_PATH=
|
||||||
|
if exist %HADOOP_CORE_HOME%\target\bin (
|
||||||
|
set JAVA_LIBRARY_PATH=%HADOOP_CORE_HOME%\target\bin
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem For the distro case, check the bin folder
|
||||||
|
if exist %HADOOP_CORE_HOME%\bin (
|
||||||
|
set JAVA_LIBRARY_PATH=%JAVA_LIBRARY_PATH%;%HADOOP_CORE_HOME%\bin
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem setup a default TOOL_PATH
|
||||||
|
@rem
|
||||||
|
set TOOL_PATH=%HADOOP_HOME%\share\hadoop\tools\lib\*
|
||||||
|
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.dir=%HADOOP_LOG_DIR%
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.file=%HADOOP_LOGFILE%
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.home.dir=%HADOOP_HOME%
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.id.str=%HADOOP_IDENT_STRING%
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.root.logger=%HADOOP_ROOT_LOGGER%
|
||||||
|
|
||||||
|
if defined JAVA_LIBRARY_PATH (
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Djava.library.path=%JAVA_LIBRARY_PATH%
|
||||||
|
)
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.policy.file=%HADOOP_POLICYFILE%
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem Disable ipv6 as it can cause issues
|
||||||
|
@rem
|
||||||
|
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem put hdfs in classpath if present
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_HDFS_HOME (
|
||||||
|
if exist %HADOOP_HOME%\%HDFS_DIR% (
|
||||||
|
set HADOOP_HDFS_HOME=%HADOOP_HOME%
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_HDFS_HOME%\%HDFS_DIR%\webapps (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_HDFS_HOME%\%HDFS_LIB_JARS_DIR% (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_LIB_JARS_DIR%\*
|
||||||
|
)
|
||||||
|
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%\*
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem put yarn in classpath if present
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_YARN_HOME (
|
||||||
|
if exist %HADOOP_HOME%\%YARN_DIR% (
|
||||||
|
set HADOOP_YARN_HOME=%HADOOP_HOME%
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_YARN_HOME%\%YARN_DIR%\webapps (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR% (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR%\*
|
||||||
|
)
|
||||||
|
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%\*
|
||||||
|
|
||||||
|
@rem
|
||||||
|
@rem put mapred in classpath if present AND different from YARN
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_MAPRED_HOME (
|
||||||
|
if exist %HADOOP_HOME%\%MAPRED_DIR% (
|
||||||
|
set HADOOP_MAPRED_HOME=%HADOOP_HOME%
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not "%HADOOP_MAPRED_HOME%\%MAPRED_DIR%" == "%HADOOP_YARN_HOME%\%YARN_DIR%" (
|
||||||
|
|
||||||
|
if exist %HADOOP_MAPRED_HOME%\%MAPRED_DIR%\webapps (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR% (
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR%\*
|
||||||
|
)
|
||||||
|
|
||||||
|
set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%\*
|
||||||
|
)
|
||||||
|
|
||||||
|
:eof
|
|
@ -108,11 +108,6 @@ if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cygwin=false
|
|
||||||
case "`uname`" in
|
|
||||||
CYGWIN*) cygwin=true;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
|
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
|
||||||
. "${HADOOP_CONF_DIR}/hadoop-env.sh"
|
. "${HADOOP_CONF_DIR}/hadoop-env.sh"
|
||||||
fi
|
fi
|
||||||
|
@ -199,13 +194,6 @@ fi
|
||||||
# restore ordinary behaviour
|
# restore ordinary behaviour
|
||||||
unset IFS
|
unset IFS
|
||||||
|
|
||||||
# cygwin path translation
|
|
||||||
if $cygwin; then
|
|
||||||
HADOOP_PREFIX=`cygpath -w "$HADOOP_PREFIX"`
|
|
||||||
HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
|
|
||||||
JAVA_LIBRARY_PATH=`cygpath -w "$JAVA_LIBRARY_PATH"`
|
|
||||||
fi
|
|
||||||
|
|
||||||
# setup 'java.library.path' for native-hadoop code if necessary
|
# setup 'java.library.path' for native-hadoop code if necessary
|
||||||
|
|
||||||
if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR" ]; then
|
if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR" ]; then
|
||||||
|
@ -222,11 +210,6 @@ fi
|
||||||
# setup a default TOOL_PATH
|
# setup a default TOOL_PATH
|
||||||
TOOL_PATH="${TOOL_PATH:-$HADOOP_PREFIX/share/hadoop/tools/lib/*}"
|
TOOL_PATH="${TOOL_PATH:-$HADOOP_PREFIX/share/hadoop/tools/lib/*}"
|
||||||
|
|
||||||
# cygwin path translation
|
|
||||||
if $cygwin; then
|
|
||||||
JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
|
|
||||||
fi
|
|
||||||
|
|
||||||
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
|
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
|
||||||
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
|
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
|
||||||
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_PREFIX"
|
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_PREFIX"
|
||||||
|
@ -306,12 +289,3 @@ if [ "$HADOOP_CLASSPATH" != "" ]; then
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# cygwin path translation
|
|
||||||
if $cygwin; then
|
|
||||||
HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`
|
|
||||||
fi
|
|
||||||
|
|
||||||
# cygwin path translation
|
|
||||||
if $cygwin; then
|
|
||||||
TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
|
|
||||||
fi
|
|
||||||
|
|
|
@ -0,0 +1,235 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
@rem This script runs the hadoop core commands.
|
||||||
|
|
||||||
|
@rem Environment Variables
|
||||||
|
@rem
|
||||||
|
@rem JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_CLASSPATH Extra Java CLASSPATH entries.
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_USER_CLASSPATH_FIRST When defined, the HADOOP_CLASSPATH is
|
||||||
|
@rem added in the beginning of the global
|
||||||
|
@rem classpath. Can be defined, for example,
|
||||||
|
@rem by doing
|
||||||
|
@rem export HADOOP_USER_CLASSPATH_FIRST=true
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_HEAPSIZE The maximum amount of heap to use, in MB.
|
||||||
|
@rem Default is 1000.
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_OPTS Extra Java runtime options.
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_CLIENT_OPTS when the respective command is run.
|
||||||
|
@rem HADOOP_{COMMAND}_OPTS etc HADOOP_JT_OPTS applies to JobTracker
|
||||||
|
@rem for e.g. HADOOP_CLIENT_OPTS applies to
|
||||||
|
@rem more than one command (fs, dfs, fsck,
|
||||||
|
@rem dfsadmin etc)
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_CONF_DIR Alternate conf dir. Default is ${HADOOP_HOME}/conf.
|
||||||
|
@rem
|
||||||
|
@rem HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
|
||||||
|
@rem
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
call :updatepath %HADOOP_BIN_PATH%
|
||||||
|
|
||||||
|
:main
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
set hadoop-command=%1
|
||||||
|
if not defined hadoop-command (
|
||||||
|
goto print_usage
|
||||||
|
)
|
||||||
|
|
||||||
|
call :make_command_arguments %*
|
||||||
|
|
||||||
|
set hdfscommands=namenode secondarynamenode datanode dfs dfsadmin fsck balancer fetchdt oiv dfsgroups
|
||||||
|
for %%i in ( %hdfscommands% ) do (
|
||||||
|
if %hadoop-command% == %%i set hdfscommand=true
|
||||||
|
)
|
||||||
|
if defined hdfscommand (
|
||||||
|
@echo DEPRECATED: Use of this script to execute hdfs command is deprecated. 1>&2
|
||||||
|
@echo Instead use the hdfs command for it. 1>&2
|
||||||
|
if exist %HADOOP_HDFS_HOME%\bin\hdfs.cmd (
|
||||||
|
call %HADOOP_HDFS_HOME%\bin\hdfs.cmd %*
|
||||||
|
goto :eof
|
||||||
|
) else if exist %HADOOP_HOME%\bin\hdfs.cmd (
|
||||||
|
call %HADOOP_HOME%\bin\hdfs.cmd %*
|
||||||
|
goto :eof
|
||||||
|
) else (
|
||||||
|
echo HADOOP_HDFS_HOME not found!
|
||||||
|
goto :eof
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
set mapredcommands=pipes job queue mrgroups mradmin jobtracker tasktracker
|
||||||
|
for %%i in ( %mapredcommands% ) do (
|
||||||
|
if %hadoop-command% == %%i set mapredcommand=true
|
||||||
|
)
|
||||||
|
if defined mapredcommand (
|
||||||
|
@echo DEPRECATED: Use of this script to execute mapred command is deprecated. 1>&2
|
||||||
|
@echo Instead use the mapred command for it. 1>&2
|
||||||
|
if exist %HADOOP_MAPRED_HOME%\bin\mapred.cmd (
|
||||||
|
call %HADOOP_MAPRED_HOME%\bin\mapred.cmd %*
|
||||||
|
goto :eof
|
||||||
|
) else if exist %HADOOP_HOME%\bin\mapred.cmd (
|
||||||
|
call %HADOOP_HOME%\bin\mapred.cmd %*
|
||||||
|
goto :eof
|
||||||
|
) else (
|
||||||
|
echo HADOOP_MAPRED_HOME not found!
|
||||||
|
goto :eof
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if %hadoop-command% == classpath (
|
||||||
|
@echo %CLASSPATH%
|
||||||
|
goto :eof
|
||||||
|
)
|
||||||
|
|
||||||
|
set corecommands=fs version jar distcp daemonlog archive
|
||||||
|
for %%i in ( %corecommands% ) do (
|
||||||
|
if %hadoop-command% == %%i set corecommand=true
|
||||||
|
)
|
||||||
|
if defined corecommand (
|
||||||
|
call :%hadoop-command%
|
||||||
|
) else (
|
||||||
|
set CLASSPATH=%CLASSPATH%;%CD%
|
||||||
|
set CLASS=%hadoop-command%
|
||||||
|
)
|
||||||
|
|
||||||
|
set path=%PATH%;%HADOOP_BIN_PATH%
|
||||||
|
|
||||||
|
@rem Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
|
||||||
|
|
||||||
|
@rem make sure security appender is turned off
|
||||||
|
if not defined HADOOP_SECURITY_LOGGER (
|
||||||
|
set HADOOP_SECURITY_LOGGER=INFO,NullAppender
|
||||||
|
)
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER%
|
||||||
|
|
||||||
|
call %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hadoop-command-arguments%
|
||||||
|
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:fs
|
||||||
|
set CLASS=org.apache.hadoop.fs.FsShell
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:version
|
||||||
|
set CLASS=org.apache.hadoop.util.VersionInfo
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:jar
|
||||||
|
set CLASS=org.apache.hadoop.util.RunJar
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:distcp
|
||||||
|
set CLASS=org.apache.hadoop.tools.DistCp
|
||||||
|
set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:daemonlog
|
||||||
|
set CLASS=org.apache.hadoop.log.LogLevel
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:archive
|
||||||
|
set CLASS=org.apache.hadoop.tools.HadoopArchives
|
||||||
|
set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:updatepath
|
||||||
|
set path_to_add=%*
|
||||||
|
set current_path_comparable=%path%
|
||||||
|
set current_path_comparable=%current_path_comparable: =_%
|
||||||
|
set current_path_comparable=%current_path_comparable:(=_%
|
||||||
|
set current_path_comparable=%current_path_comparable:)=_%
|
||||||
|
set path_to_add_comparable=%path_to_add%
|
||||||
|
set path_to_add_comparable=%path_to_add_comparable: =_%
|
||||||
|
set path_to_add_comparable=%path_to_add_comparable:(=_%
|
||||||
|
set path_to_add_comparable=%path_to_add_comparable:)=_%
|
||||||
|
|
||||||
|
for %%i in ( %current_path_comparable% ) do (
|
||||||
|
if /i "%%i" == "%path_to_add_comparable%" (
|
||||||
|
set path_to_add_exist=true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
set system_path_comparable=
|
||||||
|
set path_to_add_comparable=
|
||||||
|
if not defined path_to_add_exist path=%path_to_add%;%path%
|
||||||
|
set path_to_add=
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
|
||||||
|
:make_command_arguments
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
if [%2] == [] goto :eof
|
||||||
|
shift
|
||||||
|
set _arguments=
|
||||||
|
:MakeCmdArgsLoop
|
||||||
|
if [%1]==[] goto :EndLoop
|
||||||
|
|
||||||
|
if not defined _arguments (
|
||||||
|
set _arguments=%1
|
||||||
|
) else (
|
||||||
|
set _arguments=!_arguments! %1
|
||||||
|
)
|
||||||
|
shift
|
||||||
|
goto :MakeCmdArgsLoop
|
||||||
|
:EndLoop
|
||||||
|
set hadoop-command-arguments=%_arguments%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:print_usage
|
||||||
|
@echo Usage: hadoop [--config confdir] COMMAND
|
||||||
|
@echo where COMMAND is one of:
|
||||||
|
@echo fs run a generic filesystem user client
|
||||||
|
@echo version print the version
|
||||||
|
@echo jar ^<jar^> run a jar file
|
||||||
|
@echo distcp ^<srcurl^> ^<desturl^> copy file or directories recursively
|
||||||
|
@echo archive -archiveName NAME -p ^<parent path^> ^<src^>* ^<dest^> create a hadoop archive
|
||||||
|
@echo classpath prints the class path needed to get the
|
||||||
|
@echo Hadoop jar and the required libraries
|
||||||
|
@echo daemonlog get/set the log level for each daemon
|
||||||
|
@echo or
|
||||||
|
@echo CLASSNAME run the class named CLASSNAME
|
||||||
|
@echo.
|
||||||
|
@echo Most commands print help when invoked w/o parameters.
|
||||||
|
|
||||||
|
endlocal
|
|
@ -57,10 +57,5 @@ unset IFS
|
||||||
|
|
||||||
CLASS='org.apache.hadoop.record.compiler.generated.Rcc'
|
CLASS='org.apache.hadoop.record.compiler.generated.Rcc'
|
||||||
|
|
||||||
# cygwin path translation
|
|
||||||
if expr `uname` : 'CYGWIN*' > /dev/null; then
|
|
||||||
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
|
|
||||||
fi
|
|
||||||
|
|
||||||
# run it
|
# run it
|
||||||
exec "$JAVA" $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
|
exec "$JAVA" $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
|
||||||
|
|
|
@ -0,0 +1,52 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
@rem Start all hadoop daemons. Run this on master node.
|
||||||
|
|
||||||
|
echo This script is Deprecated. Instead use start-dfs.cmd and start-yarn.cmd
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem start hdfs daemons if hdfs is present
|
||||||
|
if exist %HADOOP_HDFS_HOME%\sbin\start-dfs.cmd (
|
||||||
|
call %HADOOP_HDFS_HOME%\sbin\start-dfs.cmd --config %HADOOP_CONF_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem start yarn daemons if yarn is present
|
||||||
|
if exist %HADOOP_YARN_HOME%\sbin\start-yarn.cmd (
|
||||||
|
call %HADOOP_YARN_HOME%\sbin\start-yarn.cmd --config %HADOOP_CONF_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
endlocal
|
|
@ -0,0 +1,52 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
@rem Stop all hadoop daemons. Run this on master node.
|
||||||
|
|
||||||
|
echo This script is Deprecated. Instead use stop-dfs.cmd and stop-yarn.cmd
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem stop hdfs daemons if hdfs is present
|
||||||
|
if exist %HADOOP_HDFS_HOME%\sbin\stop-dfs.cmd (
|
||||||
|
call %HADOOP_HDFS_HOME%\sbin\stop-dfs.cmd --config %HADOOP_CONF_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem stop yarn daemons if yarn is present
|
||||||
|
if exist %HADOOP_YARN_HOME%\sbin\stop-yarn.cmd (
|
||||||
|
call %HADOOP_YARN_HOME%\sbin\stop-yarn.cmd --config %HADOOP_CONF_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
endlocal
|
|
@ -0,0 +1,81 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
@rem Set Hadoop-specific environment variables here.
|
||||||
|
|
||||||
|
@rem The only required environment variable is JAVA_HOME. All others are
|
||||||
|
@rem optional. When running a distributed configuration it is best to
|
||||||
|
@rem set JAVA_HOME in this file, so that it is correctly defined on
|
||||||
|
@rem remote nodes.
|
||||||
|
|
||||||
|
@rem The java implementation to use. Required.
|
||||||
|
set JAVA_HOME=%JAVA_HOME%
|
||||||
|
|
||||||
|
@rem The jsvc implementation to use. Jsvc is required to run secure datanodes.
|
||||||
|
@rem set JSVC_HOME=%JSVC_HOME%
|
||||||
|
|
||||||
|
@rem set HADOOP_CONF_DIR=
|
||||||
|
|
||||||
|
@rem Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
|
||||||
|
if exist %HADOOP_HOME%\contrib\capacity-scheduler (
|
||||||
|
if not defined HADOOP_CLASSPATH (
|
||||||
|
set HADOOP_CLASSPATH=%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
|
||||||
|
) else (
|
||||||
|
set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@rem The maximum amount of heap to use, in MB. Default is 1000.
|
||||||
|
@rem set HADOOP_HEAPSIZE=
|
||||||
|
@rem set HADOOP_NAMENODE_INIT_HEAPSIZE=""
|
||||||
|
|
||||||
|
@rem Extra Java runtime options. Empty by default.
|
||||||
|
@rem set HADOOP_OPTS=-Djava.net.preferIPv4Stack=true %HADOOP_CLIENT_OPTS%
|
||||||
|
|
||||||
|
@rem Command specific options appended to HADOOP_OPTS when specified
|
||||||
|
if not defined HADOOP_SECURITY_LOGGER (
|
||||||
|
set HADOOP_SECURITY_LOGGER=INFO,RFAS
|
||||||
|
)
|
||||||
|
if not defined HDFS_AUDIT_LOGGER (
|
||||||
|
set HDFS_AUDIT_LOGGER=INFO,NullAppender
|
||||||
|
)
|
||||||
|
|
||||||
|
set HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_NAMENODE_OPTS%
|
||||||
|
set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS %HADOOP_DATANODE_OPTS%
|
||||||
|
set HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%
|
||||||
|
|
||||||
|
@rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)
|
||||||
|
set HADOOP_CLIENT_OPTS=-Xmx128m %HADOOP_CLIENT_OPTS%
|
||||||
|
@rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData %HADOOP_JAVA_PLATFORM_OPTS%"
|
||||||
|
|
||||||
|
@rem On secure datanodes, user to run the datanode as after dropping privileges
|
||||||
|
set HADOOP_SECURE_DN_USER=%HADOOP_SECURE_DN_USER%
|
||||||
|
|
||||||
|
@rem Where log files are stored. %HADOOP_HOME%/logs by default.
|
||||||
|
@rem set HADOOP_LOG_DIR=%HADOOP_LOG_DIR%\%USERNAME%
|
||||||
|
|
||||||
|
@rem Where log files are stored in the secure data environment.
|
||||||
|
set HADOOP_SECURE_DN_LOG_DIR=%HADOOP_LOG_DIR%\%HADOOP_HDFS_USER%
|
||||||
|
|
||||||
|
@rem The directory where pid files are stored. /tmp by default.
|
||||||
|
@rem NOTE: this should be set to a directory that can only be written to by
|
||||||
|
@rem the user that will run the hadoop daemons. Otherwise there is the
|
||||||
|
@rem potential for a symlink attack.
|
||||||
|
set HADOOP_PID_DIR=%HADOOP_PID_DIR%
|
||||||
|
set HADOOP_SECURE_DN_PID_DIR=%HADOOP_PID_DIR%
|
||||||
|
|
||||||
|
@rem A string representing this instance of hadoop. %USERNAME% by default.
|
||||||
|
set HADOOP_IDENT_STRING=%USERNAME%
|
|
@ -87,7 +87,6 @@ See http://forrest.apache.org/docs/linking.html for more info.
|
||||||
<zlib href="http://www.zlib.net/" />
|
<zlib href="http://www.zlib.net/" />
|
||||||
<gzip href="http://www.gzip.org/" />
|
<gzip href="http://www.gzip.org/" />
|
||||||
<bzip href="http://www.bzip.org/" />
|
<bzip href="http://www.bzip.org/" />
|
||||||
<cygwin href="http://www.cygwin.com/" />
|
|
||||||
<osx href="http://www.apple.com/macosx" />
|
<osx href="http://www.apple.com/macosx" />
|
||||||
|
|
||||||
<relnotes href="releasenotes.html" />
|
<relnotes href="releasenotes.html" />
|
||||||
|
|
|
@ -35,7 +35,7 @@
|
||||||
|
|
||||||
/** Filesystem disk space usage statistics.
|
/** Filesystem disk space usage statistics.
|
||||||
* Uses the unix 'df' program to get mount points, and java.io.File for
|
* Uses the unix 'df' program to get mount points, and java.io.File for
|
||||||
* space utilization. Tested on Linux, FreeBSD, Cygwin. */
|
* space utilization. Tested on Linux, FreeBSD, Windows. */
|
||||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||||
@InterfaceStability.Evolving
|
@InterfaceStability.Evolving
|
||||||
public class DF extends Shell {
|
public class DF extends Shell {
|
||||||
|
@ -163,10 +163,23 @@ public String toString() {
|
||||||
mount;
|
mount;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void run() throws IOException {
|
||||||
|
if (WINDOWS) {
|
||||||
|
try {
|
||||||
|
this.mount = dirFile.getCanonicalPath().substring(0,2);
|
||||||
|
} catch (IOException e) {
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
super.run();
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected String[] getExecString() {
|
protected String[] getExecString() {
|
||||||
// ignoring the error since the exit code it enough
|
// ignoring the error since the exit code it enough
|
||||||
return new String[] {"bash","-c","exec 'df' '-k' '-P' '" + dirPath
|
return (WINDOWS)? new String[]{"cmd", "/c", "df -k " + dirPath + " 2>nul"}:
|
||||||
|
new String[] {"bash","-c","exec 'df' '-k' '-P' '" + dirPath
|
||||||
+ "' 2>/dev/null"};
|
+ "' 2>/dev/null"};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -146,6 +146,20 @@ public String getDirPath() {
|
||||||
return dirPath;
|
return dirPath;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Override to hook in DUHelper class. Maybe this can be used more
|
||||||
|
* generally as well on Unix/Linux based systems
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
protected void run() throws IOException {
|
||||||
|
if (WINDOWS) {
|
||||||
|
used.set(DUHelper.getFolderUsage(dirPath));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
super.run();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start the disk usage checking thread.
|
* Start the disk usage checking thread.
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -0,0 +1,91 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.apache.hadoop.fs;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
|
public class DUHelper {
|
||||||
|
|
||||||
|
private int folderCount=0;
|
||||||
|
private int fileCount=0;
|
||||||
|
private double usage = 0;
|
||||||
|
private long folderSize = -1;
|
||||||
|
|
||||||
|
private DUHelper() {
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public static long getFolderUsage(String folder) {
|
||||||
|
return new DUHelper().calculateFolderSize(folder);
|
||||||
|
}
|
||||||
|
|
||||||
|
private long calculateFolderSize(String folder) {
|
||||||
|
if (folder == null)
|
||||||
|
throw new IllegalArgumentException("folder");
|
||||||
|
File f = new File(folder);
|
||||||
|
return folderSize = getFileSize(f);
|
||||||
|
}
|
||||||
|
|
||||||
|
public String check(String folder) {
|
||||||
|
if (folder == null)
|
||||||
|
throw new IllegalArgumentException("folder");
|
||||||
|
File f = new File(folder);
|
||||||
|
|
||||||
|
folderSize = getFileSize(f);
|
||||||
|
usage = 1.0*(f.getTotalSpace() - f.getFreeSpace())/ f.getTotalSpace();
|
||||||
|
return String.format("used %d files %d disk in use %f", folderSize, fileCount, usage);
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getFileCount() {
|
||||||
|
return fileCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getUsage() {
|
||||||
|
return usage;
|
||||||
|
}
|
||||||
|
|
||||||
|
private long getFileSize(File folder) {
|
||||||
|
|
||||||
|
folderCount++;
|
||||||
|
//Counting the total folders
|
||||||
|
long foldersize = 0;
|
||||||
|
if (folder.isFile())
|
||||||
|
return folder.length();
|
||||||
|
File[] filelist = folder.listFiles();
|
||||||
|
if (filelist == null) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
for (int i = 0; i < filelist.length; i++) {
|
||||||
|
if (filelist[i].isDirectory()) {
|
||||||
|
foldersize += getFileSize(filelist[i]);
|
||||||
|
} else {
|
||||||
|
fileCount++; //Counting the total files
|
||||||
|
foldersize += filelist[i].length();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return foldersize;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
if (Shell.WINDOWS)
|
||||||
|
System.out.println("Windows: "+ DUHelper.getFolderUsage(args[0]));
|
||||||
|
else
|
||||||
|
System.out.println("Other: " + DUHelper.getFolderUsage(args[0]));
|
||||||
|
}
|
||||||
|
}
|
|
@ -19,15 +19,26 @@
|
||||||
package org.apache.hadoop.fs;
|
package org.apache.hadoop.fs;
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Enumeration;
|
import java.util.Enumeration;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.jar.Attributes;
|
||||||
|
import java.util.jar.JarOutputStream;
|
||||||
|
import java.util.jar.Manifest;
|
||||||
import java.util.zip.ZipEntry;
|
import java.util.zip.ZipEntry;
|
||||||
import java.util.zip.ZipFile;
|
import java.util.zip.ZipFile;
|
||||||
|
|
||||||
|
import org.apache.commons.collections.map.CaseInsensitiveMap;
|
||||||
import org.apache.hadoop.classification.InterfaceAudience;
|
import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
import org.apache.hadoop.classification.InterfaceStability;
|
import org.apache.hadoop.classification.InterfaceStability;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.permission.FsAction;
|
||||||
|
import org.apache.hadoop.fs.permission.FsPermission;
|
||||||
import org.apache.hadoop.io.IOUtils;
|
import org.apache.hadoop.io.IOUtils;
|
||||||
|
import org.apache.hadoop.io.nativeio.NativeIO;
|
||||||
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.apache.hadoop.util.Shell;
|
import org.apache.hadoop.util.Shell;
|
||||||
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
|
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
|
||||||
|
|
||||||
|
@ -43,6 +54,13 @@ public class FileUtil {
|
||||||
|
|
||||||
private static final Log LOG = LogFactory.getLog(FileUtil.class);
|
private static final Log LOG = LogFactory.getLog(FileUtil.class);
|
||||||
|
|
||||||
|
/* The error code is defined in winutils to indicate insufficient
|
||||||
|
* privilege to create symbolic links. This value need to keep in
|
||||||
|
* sync with the constant of the same name in:
|
||||||
|
* "src\winutils\common.h"
|
||||||
|
* */
|
||||||
|
public static final int SYMLINK_NO_PRIVILEGE = 2;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* convert an array of FileStatus to an array of Path
|
* convert an array of FileStatus to an array of Path
|
||||||
*
|
*
|
||||||
|
@ -466,34 +484,6 @@ private static Path checkDest(String srcName, FileSystem dstFS, Path dst,
|
||||||
return dst;
|
return dst;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* This class is only used on windows to invoke the cygpath command.
|
|
||||||
*/
|
|
||||||
private static class CygPathCommand extends Shell {
|
|
||||||
String[] command;
|
|
||||||
String result;
|
|
||||||
CygPathCommand(String path) throws IOException {
|
|
||||||
command = new String[]{"cygpath", "-u", path};
|
|
||||||
run();
|
|
||||||
}
|
|
||||||
String getResult() throws IOException {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
@Override
|
|
||||||
protected String[] getExecString() {
|
|
||||||
return command;
|
|
||||||
}
|
|
||||||
@Override
|
|
||||||
protected void parseExecResult(BufferedReader lines) throws IOException {
|
|
||||||
String line = lines.readLine();
|
|
||||||
if (line == null) {
|
|
||||||
throw new IOException("Can't convert '" + command[2] +
|
|
||||||
" to a cygwin path");
|
|
||||||
}
|
|
||||||
result = line;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert a os-native filename to a path that works for the shell.
|
* Convert a os-native filename to a path that works for the shell.
|
||||||
* @param filename The filename to convert
|
* @param filename The filename to convert
|
||||||
|
@ -501,12 +491,8 @@ protected void parseExecResult(BufferedReader lines) throws IOException {
|
||||||
* @throws IOException on windows, there can be problems with the subprocess
|
* @throws IOException on windows, there can be problems with the subprocess
|
||||||
*/
|
*/
|
||||||
public static String makeShellPath(String filename) throws IOException {
|
public static String makeShellPath(String filename) throws IOException {
|
||||||
if (Path.WINDOWS) {
|
|
||||||
return new CygPathCommand(filename).getResult();
|
|
||||||
} else {
|
|
||||||
return filename;
|
return filename;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert a os-native filename to a path that works for the shell.
|
* Convert a os-native filename to a path that works for the shell.
|
||||||
|
@ -659,7 +645,7 @@ public static void unTar(File inFile, File untarDir) throws IOException {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Class for creating hardlinks.
|
* Class for creating hardlinks.
|
||||||
* Supports Unix, Cygwin, WindXP.
|
* Supports Unix, WindXP.
|
||||||
* @deprecated Use {@link org.apache.hadoop.fs.HardLink}
|
* @deprecated Use {@link org.apache.hadoop.fs.HardLink}
|
||||||
*/
|
*/
|
||||||
@Deprecated
|
@Deprecated
|
||||||
|
@ -671,21 +657,67 @@ public static class HardLink extends org.apache.hadoop.fs.HardLink {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a soft link between a src and destination
|
* Create a soft link between a src and destination
|
||||||
* only on a local disk. HDFS does not support this
|
* only on a local disk. HDFS does not support this.
|
||||||
|
* On Windows, when symlink creation fails due to security
|
||||||
|
* setting, we will log a warning. The return code in this
|
||||||
|
* case is 2.
|
||||||
* @param target the target for symlink
|
* @param target the target for symlink
|
||||||
* @param linkname the symlink
|
* @param linkname the symlink
|
||||||
* @return value returned by the command
|
* @return value returned by the command
|
||||||
*/
|
*/
|
||||||
public static int symLink(String target, String linkname) throws IOException{
|
public static int symLink(String target, String linkname) throws IOException{
|
||||||
String cmd = "ln -s " + target + " " + linkname;
|
// Run the input paths through Java's File so that they are converted to the
|
||||||
Process p = Runtime.getRuntime().exec(cmd, null);
|
// native OS form
|
||||||
int returnVal = -1;
|
File targetFile = new File(target);
|
||||||
|
File linkFile = new File(linkname);
|
||||||
|
|
||||||
|
// If not on Java7+, copy a file instead of creating a symlink since
|
||||||
|
// Java6 has close to no support for symlinks on Windows. Specifically
|
||||||
|
// File#length and File#renameTo do not work as expected.
|
||||||
|
// (see HADOOP-9061 for additional details)
|
||||||
|
// We still create symlinks for directories, since the scenario in this
|
||||||
|
// case is different. The directory content could change in which
|
||||||
|
// case the symlink loses its purpose (for example task attempt log folder
|
||||||
|
// is symlinked under userlogs and userlogs are generated afterwards).
|
||||||
|
if (Shell.WINDOWS && !Shell.isJava7OrAbove() && targetFile.isFile()) {
|
||||||
try {
|
try {
|
||||||
returnVal = p.waitFor();
|
LOG.info("FileUtil#symlink: On Java6, copying file instead "
|
||||||
} catch(InterruptedException e){
|
+ linkname + " -> " + target);
|
||||||
//do nothing as of yet
|
org.apache.commons.io.FileUtils.copyFile(targetFile, linkFile);
|
||||||
|
} catch (IOException ex) {
|
||||||
|
LOG.warn("FileUtil#symlink failed to copy the file with error: "
|
||||||
|
+ ex.getMessage());
|
||||||
|
// Exit with non-zero exit code
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
String[] cmd = Shell.getSymlinkCommand(targetFile.getPath(),
|
||||||
|
linkFile.getPath());
|
||||||
|
ShellCommandExecutor shExec = new ShellCommandExecutor(cmd);
|
||||||
|
try {
|
||||||
|
shExec.execute();
|
||||||
|
} catch (Shell.ExitCodeException ec) {
|
||||||
|
int returnVal = ec.getExitCode();
|
||||||
|
if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
|
||||||
|
LOG.warn("Fail to create symbolic links on Windows. "
|
||||||
|
+ "The default security settings in Windows disallow non-elevated "
|
||||||
|
+ "administrators and all non-administrators from creating symbolic links. "
|
||||||
|
+ "This behavior can be changed in the Local Security Policy management console");
|
||||||
|
} else if (returnVal != 0) {
|
||||||
|
LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
|
||||||
|
+ returnVal + " with: " + ec.getMessage());
|
||||||
}
|
}
|
||||||
return returnVal;
|
return returnVal;
|
||||||
|
} catch (IOException e) {
|
||||||
|
if (LOG.isDebugEnabled()) {
|
||||||
|
LOG.debug("Error while create symlink " + linkname + " to " + target
|
||||||
|
+ "." + " Exception: " + StringUtils.stringifyException(e));
|
||||||
|
}
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
return shExec.getExitCode();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -709,30 +741,120 @@ public static int chmod(String filename, String perm
|
||||||
* @param recursive true, if permissions should be changed recursively
|
* @param recursive true, if permissions should be changed recursively
|
||||||
* @return the exit code from the command.
|
* @return the exit code from the command.
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws InterruptedException
|
|
||||||
*/
|
*/
|
||||||
public static int chmod(String filename, String perm, boolean recursive)
|
public static int chmod(String filename, String perm, boolean recursive)
|
||||||
throws IOException, InterruptedException {
|
throws IOException {
|
||||||
StringBuilder cmdBuf = new StringBuilder();
|
String [] cmd = Shell.getSetPermissionCommand(perm, recursive);
|
||||||
cmdBuf.append("chmod ");
|
String[] args = new String[cmd.length + 1];
|
||||||
if (recursive) {
|
System.arraycopy(cmd, 0, args, 0, cmd.length);
|
||||||
cmdBuf.append("-R ");
|
args[cmd.length] = new File(filename).getPath();
|
||||||
}
|
ShellCommandExecutor shExec = new ShellCommandExecutor(args);
|
||||||
cmdBuf.append(perm).append(" ");
|
|
||||||
cmdBuf.append(filename);
|
|
||||||
String[] shellCmd = {"bash", "-c" ,cmdBuf.toString()};
|
|
||||||
ShellCommandExecutor shExec = new ShellCommandExecutor(shellCmd);
|
|
||||||
try {
|
try {
|
||||||
shExec.execute();
|
shExec.execute();
|
||||||
}catch(Exception e) {
|
}catch(IOException e) {
|
||||||
if(LOG.isDebugEnabled()) {
|
if(LOG.isDebugEnabled()) {
|
||||||
LOG.debug("Error while changing permission : " + filename
|
LOG.debug("Error while changing permission : " + filename
|
||||||
+ " Exception: ", e);
|
+" Exception: " + StringUtils.stringifyException(e));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return shExec.getExitCode();
|
return shExec.getExitCode();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the ownership on a file / directory. User name and group name
|
||||||
|
* cannot both be null.
|
||||||
|
* @param file the file to change
|
||||||
|
* @param username the new user owner name
|
||||||
|
* @param groupname the new group owner name
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
public static void setOwner(File file, String username,
|
||||||
|
String groupname) throws IOException {
|
||||||
|
if (username == null && groupname == null) {
|
||||||
|
throw new IOException("username == null && groupname == null");
|
||||||
|
}
|
||||||
|
String arg = (username == null ? "" : username)
|
||||||
|
+ (groupname == null ? "" : ":" + groupname);
|
||||||
|
String [] cmd = Shell.getSetOwnerCommand(arg);
|
||||||
|
execCommand(file, cmd);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set permissions to the required value. Uses the java primitives instead
|
||||||
|
* of forking if group == other.
|
||||||
|
* @param f the file to change
|
||||||
|
* @param permission the new permissions
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
public static void setPermission(File f, FsPermission permission
|
||||||
|
) throws IOException {
|
||||||
|
FsAction user = permission.getUserAction();
|
||||||
|
FsAction group = permission.getGroupAction();
|
||||||
|
FsAction other = permission.getOtherAction();
|
||||||
|
|
||||||
|
// use the native/fork if the group/other permissions are different
|
||||||
|
// or if the native is available or on Windows
|
||||||
|
if (group != other || NativeIO.isAvailable() || Shell.WINDOWS) {
|
||||||
|
execSetPermission(f, permission);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
boolean rv = true;
|
||||||
|
|
||||||
|
// read perms
|
||||||
|
rv = f.setReadable(group.implies(FsAction.READ), false);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
if (group.implies(FsAction.READ) != user.implies(FsAction.READ)) {
|
||||||
|
rv = f.setReadable(user.implies(FsAction.READ), true);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
}
|
||||||
|
|
||||||
|
// write perms
|
||||||
|
rv = f.setWritable(group.implies(FsAction.WRITE), false);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
if (group.implies(FsAction.WRITE) != user.implies(FsAction.WRITE)) {
|
||||||
|
rv = f.setWritable(user.implies(FsAction.WRITE), true);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
}
|
||||||
|
|
||||||
|
// exec perms
|
||||||
|
rv = f.setExecutable(group.implies(FsAction.EXECUTE), false);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
if (group.implies(FsAction.EXECUTE) != user.implies(FsAction.EXECUTE)) {
|
||||||
|
rv = f.setExecutable(user.implies(FsAction.EXECUTE), true);
|
||||||
|
checkReturnValue(rv, f, permission);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void checkReturnValue(boolean rv, File p,
|
||||||
|
FsPermission permission
|
||||||
|
) throws IOException {
|
||||||
|
if (!rv) {
|
||||||
|
throw new IOException("Failed to set permissions of path: " + p +
|
||||||
|
" to " +
|
||||||
|
String.format("%04o", permission.toShort()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void execSetPermission(File f,
|
||||||
|
FsPermission permission
|
||||||
|
) throws IOException {
|
||||||
|
if (NativeIO.isAvailable()) {
|
||||||
|
NativeIO.POSIX.chmod(f.getCanonicalPath(), permission.toShort());
|
||||||
|
} else {
|
||||||
|
execCommand(f, Shell.getSetPermissionCommand(
|
||||||
|
String.format("%04o", permission.toShort()), false));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static String execCommand(File f, String... cmd) throws IOException {
|
||||||
|
String[] args = new String[cmd.length + 1];
|
||||||
|
System.arraycopy(cmd, 0, args, 0, cmd.length);
|
||||||
|
args[cmd.length] = f.getCanonicalPath();
|
||||||
|
String output = Shell.execCommand(args);
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a tmp file for a base file.
|
* Create a tmp file for a base file.
|
||||||
* @param basefile the base file of the tmp
|
* @param basefile the base file of the tmp
|
||||||
|
@ -820,4 +942,97 @@ public static String[] list(File dir) throws IOException {
|
||||||
}
|
}
|
||||||
return fileNames;
|
return fileNames;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a jar file at the given path, containing a manifest with a classpath
|
||||||
|
* that references all specified entries.
|
||||||
|
*
|
||||||
|
* Some platforms may have an upper limit on command line length. For example,
|
||||||
|
* the maximum command line length on Windows is 8191 characters, but the
|
||||||
|
* length of the classpath may exceed this. To work around this limitation,
|
||||||
|
* use this method to create a small intermediate jar with a manifest that
|
||||||
|
* contains the full classpath. It returns the absolute path to the new jar,
|
||||||
|
* which the caller may set as the classpath for a new process.
|
||||||
|
*
|
||||||
|
* Environment variable evaluation is not supported within a jar manifest, so
|
||||||
|
* this method expands environment variables before inserting classpath entries
|
||||||
|
* to the manifest. The method parses environment variables according to
|
||||||
|
* platform-specific syntax (%VAR% on Windows, or $VAR otherwise). On Windows,
|
||||||
|
* environment variables are case-insensitive. For example, %VAR% and %var%
|
||||||
|
* evaluate to the same value.
|
||||||
|
*
|
||||||
|
* Specifying the classpath in a jar manifest does not support wildcards, so
|
||||||
|
* this method expands wildcards internally. Any classpath entry that ends
|
||||||
|
* with * is translated to all files at that path with extension .jar or .JAR.
|
||||||
|
*
|
||||||
|
* @param inputClassPath String input classpath to bundle into the jar manifest
|
||||||
|
* @param pwd Path to working directory to save jar
|
||||||
|
* @return String absolute path to new jar
|
||||||
|
* @throws IOException if there is an I/O error while writing the jar file
|
||||||
|
*/
|
||||||
|
public static String createJarWithClassPath(String inputClassPath, Path pwd)
|
||||||
|
throws IOException {
|
||||||
|
// Replace environment variables, case-insensitive on Windows
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
Map<String, String> env = Shell.WINDOWS ?
|
||||||
|
new CaseInsensitiveMap(System.getenv()) : System.getenv();
|
||||||
|
String[] classPathEntries = inputClassPath.split(File.pathSeparator);
|
||||||
|
for (int i = 0; i < classPathEntries.length; ++i) {
|
||||||
|
classPathEntries[i] = StringUtils.replaceTokens(classPathEntries[i],
|
||||||
|
StringUtils.ENV_VAR_PATTERN, env);
|
||||||
|
}
|
||||||
|
File workingDir = new File(pwd.toString());
|
||||||
|
if (!workingDir.mkdirs()) {
|
||||||
|
// If mkdirs returns false because the working directory already exists,
|
||||||
|
// then this is acceptable. If it returns false due to some other I/O
|
||||||
|
// error, then this method will fail later with an IOException while saving
|
||||||
|
// the jar.
|
||||||
|
LOG.debug("mkdirs false for " + workingDir + ", execution will continue");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append all entries
|
||||||
|
List<String> classPathEntryList = new ArrayList<String>(
|
||||||
|
classPathEntries.length);
|
||||||
|
for (String classPathEntry: classPathEntries) {
|
||||||
|
if (classPathEntry.endsWith("*")) {
|
||||||
|
// Append all jars that match the wildcard
|
||||||
|
Path globPath = new Path(classPathEntry).suffix("{.jar,.JAR}");
|
||||||
|
FileStatus[] wildcardJars = FileContext.getLocalFSFileContext().util()
|
||||||
|
.globStatus(globPath);
|
||||||
|
if (wildcardJars != null) {
|
||||||
|
for (FileStatus wildcardJar: wildcardJars) {
|
||||||
|
classPathEntryList.add(wildcardJar.getPath().toUri().toURL()
|
||||||
|
.toExternalForm());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Append just this jar
|
||||||
|
classPathEntryList.add(new File(classPathEntry).toURI().toURL()
|
||||||
|
.toExternalForm());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
String jarClassPath = StringUtils.join(" ", classPathEntryList);
|
||||||
|
|
||||||
|
// Create the manifest
|
||||||
|
Manifest jarManifest = new Manifest();
|
||||||
|
jarManifest.getMainAttributes().putValue(
|
||||||
|
Attributes.Name.MANIFEST_VERSION.toString(), "1.0");
|
||||||
|
jarManifest.getMainAttributes().putValue(
|
||||||
|
Attributes.Name.CLASS_PATH.toString(), jarClassPath);
|
||||||
|
|
||||||
|
// Write the manifest to output JAR file
|
||||||
|
File classPathJar = File.createTempFile("classpath-", ".jar", workingDir);
|
||||||
|
FileOutputStream fos = null;
|
||||||
|
BufferedOutputStream bos = null;
|
||||||
|
JarOutputStream jos = null;
|
||||||
|
try {
|
||||||
|
fos = new FileOutputStream(classPathJar);
|
||||||
|
bos = new BufferedOutputStream(fos);
|
||||||
|
jos = new JarOutputStream(bos, jarManifest);
|
||||||
|
} finally {
|
||||||
|
IOUtils.cleanup(LOG, jos, bos, fos);
|
||||||
|
}
|
||||||
|
|
||||||
|
return classPathJar.getCanonicalPath();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,9 +25,11 @@
|
||||||
import java.io.InputStreamReader;
|
import java.io.InputStreamReader;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Class for creating hardlinks.
|
* Class for creating hardlinks.
|
||||||
* Supports Unix/Linux, WinXP/2003/Vista via Cygwin, and Mac OS X.
|
* Supports Unix/Linux, Windows via winutils , and Mac OS X.
|
||||||
*
|
*
|
||||||
* The HardLink class was formerly a static inner class of FSUtil,
|
* The HardLink class was formerly a static inner class of FSUtil,
|
||||||
* and the methods provided were blatantly non-thread-safe.
|
* and the methods provided were blatantly non-thread-safe.
|
||||||
|
@ -41,7 +43,7 @@ public class HardLink {
|
||||||
|
|
||||||
public enum OSType {
|
public enum OSType {
|
||||||
OS_TYPE_UNIX,
|
OS_TYPE_UNIX,
|
||||||
OS_TYPE_WINXP,
|
OS_TYPE_WIN,
|
||||||
OS_TYPE_SOLARIS,
|
OS_TYPE_SOLARIS,
|
||||||
OS_TYPE_MAC,
|
OS_TYPE_MAC,
|
||||||
OS_TYPE_FREEBSD
|
OS_TYPE_FREEBSD
|
||||||
|
@ -56,7 +58,7 @@ public enum OSType {
|
||||||
//methods without instantiating the HardLink object
|
//methods without instantiating the HardLink object
|
||||||
static {
|
static {
|
||||||
osType = getOSType();
|
osType = getOSType();
|
||||||
if (osType == OSType.OS_TYPE_WINXP) {
|
if (osType == OSType.OS_TYPE_WIN) {
|
||||||
// Windows
|
// Windows
|
||||||
getHardLinkCommand = new HardLinkCGWin();
|
getHardLinkCommand = new HardLinkCGWin();
|
||||||
} else {
|
} else {
|
||||||
|
@ -80,14 +82,8 @@ public HardLink() {
|
||||||
|
|
||||||
static private OSType getOSType() {
|
static private OSType getOSType() {
|
||||||
String osName = System.getProperty("os.name");
|
String osName = System.getProperty("os.name");
|
||||||
if (osName.contains("Windows") &&
|
if (Shell.WINDOWS) {
|
||||||
(osName.contains("XP")
|
return OSType.OS_TYPE_WIN;
|
||||||
|| osName.contains("2003")
|
|
||||||
|| osName.contains("Vista")
|
|
||||||
|| osName.contains("Windows_7")
|
|
||||||
|| osName.contains("Windows 7")
|
|
||||||
|| osName.contains("Windows7"))) {
|
|
||||||
return OSType.OS_TYPE_WINXP;
|
|
||||||
}
|
}
|
||||||
else if (osName.contains("SunOS")
|
else if (osName.contains("SunOS")
|
||||||
|| osName.contains("Solaris")) {
|
|| osName.contains("Solaris")) {
|
||||||
|
@ -258,11 +254,6 @@ int getMaxAllowedCmdArgLength() {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Implementation of HardLinkCommandGetter class for Windows
|
* Implementation of HardLinkCommandGetter class for Windows
|
||||||
*
|
|
||||||
* Note that the linkCount shell command for Windows is actually
|
|
||||||
* a Cygwin shell command, and depends on ${cygwin}/bin
|
|
||||||
* being in the Windows PATH environment variable, so
|
|
||||||
* stat.exe can be found.
|
|
||||||
*/
|
*/
|
||||||
static class HardLinkCGWin extends HardLinkCommandGetter {
|
static class HardLinkCGWin extends HardLinkCommandGetter {
|
||||||
//The Windows command getter impl class and its member fields are
|
//The Windows command getter impl class and its member fields are
|
||||||
|
@ -270,14 +261,16 @@ static class HardLinkCGWin extends HardLinkCommandGetter {
|
||||||
//unit testing (sort of) on non-Win servers
|
//unit testing (sort of) on non-Win servers
|
||||||
|
|
||||||
static String[] hardLinkCommand = {
|
static String[] hardLinkCommand = {
|
||||||
"fsutil","hardlink","create", null, null};
|
Shell.WINUTILS,"hardlink","create", null, null};
|
||||||
static String[] hardLinkMultPrefix = {
|
static String[] hardLinkMultPrefix = {
|
||||||
"cmd","/q","/c","for", "%f", "in", "("};
|
"cmd","/q","/c","for", "%f", "in", "("};
|
||||||
static String hardLinkMultDir = "\\%f";
|
static String hardLinkMultDir = "\\%f";
|
||||||
static String[] hardLinkMultSuffix = {
|
static String[] hardLinkMultSuffix = {
|
||||||
")", "do", "fsutil", "hardlink", "create", null,
|
")", "do", Shell.WINUTILS, "hardlink", "create", null,
|
||||||
"%f", "1>NUL"};
|
"%f", "1>NUL"};
|
||||||
static String[] getLinkCountCommand = {"stat","-c%h", null};
|
static String[] getLinkCountCommand = {
|
||||||
|
Shell.WINUTILS, "hardlink",
|
||||||
|
"stat", null};
|
||||||
//Windows guarantees only 8K - 1 bytes cmd length.
|
//Windows guarantees only 8K - 1 bytes cmd length.
|
||||||
//Subtract another 64b to allow for Java 'exec' overhead
|
//Subtract another 64b to allow for Java 'exec' overhead
|
||||||
static final int maxAllowedCmdArgLength = 8*1024 - 65;
|
static final int maxAllowedCmdArgLength = 8*1024 - 65;
|
||||||
|
@ -328,12 +321,6 @@ String[] linkCount(File file)
|
||||||
String[] buf = new String[getLinkCountCommand.length];
|
String[] buf = new String[getLinkCountCommand.length];
|
||||||
System.arraycopy(getLinkCountCommand, 0, buf, 0,
|
System.arraycopy(getLinkCountCommand, 0, buf, 0,
|
||||||
getLinkCountCommand.length);
|
getLinkCountCommand.length);
|
||||||
//The linkCount command is actually a Cygwin shell command,
|
|
||||||
//not a Windows shell command, so we should use "makeShellPath()"
|
|
||||||
//instead of "getCanonicalPath()". However, that causes another
|
|
||||||
//shell exec to "cygpath.exe", and "stat.exe" actually can handle
|
|
||||||
//DOS-style paths (it just prints a couple hundred bytes of warning
|
|
||||||
//to stderr), so we use the more efficient "getCanonicalPath()".
|
|
||||||
buf[getLinkCountCommand.length - 1] = file.getCanonicalPath();
|
buf[getLinkCountCommand.length - 1] = file.getCanonicalPath();
|
||||||
return buf;
|
return buf;
|
||||||
}
|
}
|
||||||
|
@ -354,7 +341,7 @@ int getLinkMultArgLength(File fileDir, String[] fileBaseNames, File linkDir)
|
||||||
//add the fixed overhead of the hardLinkMult command
|
//add the fixed overhead of the hardLinkMult command
|
||||||
//(prefix, suffix, and Dir suffix)
|
//(prefix, suffix, and Dir suffix)
|
||||||
sum += ("cmd.exe /q /c for %f in ( ) do "
|
sum += ("cmd.exe /q /c for %f in ( ) do "
|
||||||
+ "fsutil hardlink create \\%f %f 1>NUL ").length();
|
+ Shell.WINUTILS + " hardlink create \\%f %f 1>NUL ").length();
|
||||||
return sum;
|
return sum;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -582,13 +569,9 @@ public static int getLinkCount(File fileName) throws IOException {
|
||||||
private static IOException createIOException(File f, String message,
|
private static IOException createIOException(File f, String message,
|
||||||
String error, int exitvalue, Exception cause) {
|
String error, int exitvalue, Exception cause) {
|
||||||
|
|
||||||
final String winErrMsg = "; Windows errors in getLinkCount are often due "
|
|
||||||
+ "to Cygwin misconfiguration";
|
|
||||||
|
|
||||||
final String s = "Failed to get link count on file " + f
|
final String s = "Failed to get link count on file " + f
|
||||||
+ ": message=" + message
|
+ ": message=" + message
|
||||||
+ "; error=" + error
|
+ "; error=" + error
|
||||||
+ ((osType == OSType.OS_TYPE_WINXP) ? winErrMsg : "")
|
|
||||||
+ "; exit value=" + exitvalue;
|
+ "; exit value=" + exitvalue;
|
||||||
return (cause == null) ? new IOException(s) : new IOException(s, cause);
|
return (cause == null) ? new IOException(s) : new IOException(s, cause);
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,7 @@
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.net.URISyntaxException;
|
import java.net.URISyntaxException;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import org.apache.avro.reflect.Stringable;
|
import org.apache.avro.reflect.Stringable;
|
||||||
import org.apache.commons.lang.StringUtils;
|
import org.apache.commons.lang.StringUtils;
|
||||||
|
@ -43,9 +44,17 @@ public class Path implements Comparable {
|
||||||
|
|
||||||
public static final String CUR_DIR = ".";
|
public static final String CUR_DIR = ".";
|
||||||
|
|
||||||
static final boolean WINDOWS
|
public static final boolean WINDOWS
|
||||||
= System.getProperty("os.name").startsWith("Windows");
|
= System.getProperty("os.name").startsWith("Windows");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pre-compiled regular expressions to detect path formats.
|
||||||
|
*/
|
||||||
|
private static final Pattern hasUriScheme =
|
||||||
|
Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
|
||||||
|
private static final Pattern hasDriveLetterSpecifier =
|
||||||
|
Pattern.compile("^/?[a-zA-Z]:");
|
||||||
|
|
||||||
private URI uri; // a hierarchical uri
|
private URI uri; // a hierarchical uri
|
||||||
|
|
||||||
/** Resolve a child path against a parent path. */
|
/** Resolve a child path against a parent path. */
|
||||||
|
@ -81,7 +90,7 @@ public Path(Path parent, Path child) {
|
||||||
resolved.getPath(), resolved.getFragment());
|
resolved.getPath(), resolved.getFragment());
|
||||||
}
|
}
|
||||||
|
|
||||||
private void checkPathArg( String path ) {
|
private void checkPathArg( String path ) throws IllegalArgumentException {
|
||||||
// disallow construction of a Path from an empty string
|
// disallow construction of a Path from an empty string
|
||||||
if ( path == null ) {
|
if ( path == null ) {
|
||||||
throw new IllegalArgumentException(
|
throw new IllegalArgumentException(
|
||||||
|
@ -95,15 +104,16 @@ private void checkPathArg( String path ) {
|
||||||
|
|
||||||
/** Construct a path from a String. Path strings are URIs, but with
|
/** Construct a path from a String. Path strings are URIs, but with
|
||||||
* unescaped elements and some additional normalization. */
|
* unescaped elements and some additional normalization. */
|
||||||
public Path(String pathString) {
|
public Path(String pathString) throws IllegalArgumentException {
|
||||||
checkPathArg( pathString );
|
checkPathArg( pathString );
|
||||||
|
|
||||||
// We can't use 'new URI(String)' directly, since it assumes things are
|
// We can't use 'new URI(String)' directly, since it assumes things are
|
||||||
// escaped, which we don't require of Paths.
|
// escaped, which we don't require of Paths.
|
||||||
|
|
||||||
// add a slash in front of paths with Windows drive letters
|
// add a slash in front of paths with Windows drive letters
|
||||||
if (hasWindowsDrive(pathString, false))
|
if (hasWindowsDrive(pathString) && pathString.charAt(0) != '/') {
|
||||||
pathString = "/" + pathString;
|
pathString = "/" + pathString;
|
||||||
|
}
|
||||||
|
|
||||||
// parse uri components
|
// parse uri components
|
||||||
String scheme = null;
|
String scheme = null;
|
||||||
|
@ -151,22 +161,54 @@ public Path(String scheme, String authority, String path) {
|
||||||
private void initialize(String scheme, String authority, String path,
|
private void initialize(String scheme, String authority, String path,
|
||||||
String fragment) {
|
String fragment) {
|
||||||
try {
|
try {
|
||||||
this.uri = new URI(scheme, authority, normalizePath(path), null, fragment)
|
this.uri = new URI(scheme, authority, normalizePath(scheme, path), null, fragment)
|
||||||
.normalize();
|
.normalize();
|
||||||
} catch (URISyntaxException e) {
|
} catch (URISyntaxException e) {
|
||||||
throw new IllegalArgumentException(e);
|
throw new IllegalArgumentException(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private String normalizePath(String path) {
|
/**
|
||||||
// remove double slashes & backslashes
|
* Merge 2 paths such that the second path is appended relative to the first.
|
||||||
|
* The returned path has the scheme and authority of the first path. On
|
||||||
|
* Windows, the drive specification in the second path is discarded.
|
||||||
|
*
|
||||||
|
* @param path1 Path first path
|
||||||
|
* @param path2 Path second path, to be appended relative to path1
|
||||||
|
* @return Path merged path
|
||||||
|
*/
|
||||||
|
public static Path mergePaths(Path path1, Path path2) {
|
||||||
|
String path2Str = path2.toUri().getPath();
|
||||||
|
if(hasWindowsDrive(path2Str)) {
|
||||||
|
path2Str = path2Str.substring(path2Str.indexOf(':')+1);
|
||||||
|
}
|
||||||
|
return new Path(path1 + path2Str);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize a path string to use non-duplicated forward slashes as
|
||||||
|
* the path separator and remove any trailing path separators.
|
||||||
|
* @param scheme Supplies the URI scheme. Used to deduce whether we
|
||||||
|
* should replace backslashes or not.
|
||||||
|
* @param path Supplies the scheme-specific part
|
||||||
|
* @return Normalized path string.
|
||||||
|
*/
|
||||||
|
private static String normalizePath(String scheme, String path) {
|
||||||
|
// Remove double forward slashes.
|
||||||
path = StringUtils.replace(path, "//", "/");
|
path = StringUtils.replace(path, "//", "/");
|
||||||
if (Path.WINDOWS) {
|
|
||||||
|
// Remove backslashes if this looks like a Windows path. Avoid
|
||||||
|
// the substitution if it looks like a non-local URI.
|
||||||
|
if (WINDOWS &&
|
||||||
|
(hasWindowsDrive(path) ||
|
||||||
|
(scheme == null) ||
|
||||||
|
(scheme.isEmpty()) ||
|
||||||
|
(scheme.equals("file")))) {
|
||||||
path = StringUtils.replace(path, "\\", "/");
|
path = StringUtils.replace(path, "\\", "/");
|
||||||
}
|
}
|
||||||
|
|
||||||
// trim trailing slash from non-root path (ignoring windows drive)
|
// trim trailing slash from non-root path (ignoring windows drive)
|
||||||
int minLength = hasWindowsDrive(path, true) ? 4 : 1;
|
int minLength = hasWindowsDrive(path) ? 4 : 1;
|
||||||
if (path.length() > minLength && path.endsWith("/")) {
|
if (path.length() > minLength && path.endsWith("/")) {
|
||||||
path = path.substring(0, path.length()-1);
|
path = path.substring(0, path.length()-1);
|
||||||
}
|
}
|
||||||
|
@ -174,17 +216,29 @@ private String normalizePath(String path) {
|
||||||
return path;
|
return path;
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean hasWindowsDrive(String path, boolean slashed) {
|
private static boolean hasWindowsDrive(String path) {
|
||||||
if (!WINDOWS) return false;
|
return (WINDOWS && hasDriveLetterSpecifier.matcher(path).find());
|
||||||
int start = slashed ? 1 : 0;
|
|
||||||
return
|
|
||||||
path.length() >= start+2 &&
|
|
||||||
(slashed ? path.charAt(0) == '/' : true) &&
|
|
||||||
path.charAt(start+1) == ':' &&
|
|
||||||
((path.charAt(start) >= 'A' && path.charAt(start) <= 'Z') ||
|
|
||||||
(path.charAt(start) >= 'a' && path.charAt(start) <= 'z'));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine whether a given path string represents an absolute path on
|
||||||
|
* Windows. e.g. "C:/a/b" is an absolute path. "C:a/b" is not.
|
||||||
|
*
|
||||||
|
* @param pathString Supplies the path string to evaluate.
|
||||||
|
* @param slashed true if the given path is prefixed with "/".
|
||||||
|
* @return true if the supplied path looks like an absolute path with a Windows
|
||||||
|
* drive-specifier.
|
||||||
|
*/
|
||||||
|
public static boolean isWindowsAbsolutePath(final String pathString,
|
||||||
|
final boolean slashed) {
|
||||||
|
int start = (slashed ? 1 : 0);
|
||||||
|
|
||||||
|
return
|
||||||
|
hasWindowsDrive(pathString) &&
|
||||||
|
pathString.length() >= (start + 3) &&
|
||||||
|
((pathString.charAt(start + 2) == SEPARATOR_CHAR) ||
|
||||||
|
(pathString.charAt(start + 2) == '\\'));
|
||||||
|
}
|
||||||
|
|
||||||
/** Convert this to a URI. */
|
/** Convert this to a URI. */
|
||||||
public URI toUri() { return uri; }
|
public URI toUri() { return uri; }
|
||||||
|
@ -207,7 +261,7 @@ public boolean isAbsoluteAndSchemeAuthorityNull() {
|
||||||
* True if the path component (i.e. directory) of this URI is absolute.
|
* True if the path component (i.e. directory) of this URI is absolute.
|
||||||
*/
|
*/
|
||||||
public boolean isUriPathAbsolute() {
|
public boolean isUriPathAbsolute() {
|
||||||
int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0;
|
int start = hasWindowsDrive(uri.getPath()) ? 3 : 0;
|
||||||
return uri.getPath().startsWith(SEPARATOR, start);
|
return uri.getPath().startsWith(SEPARATOR, start);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -241,7 +295,7 @@ public String getName() {
|
||||||
public Path getParent() {
|
public Path getParent() {
|
||||||
String path = uri.getPath();
|
String path = uri.getPath();
|
||||||
int lastSlash = path.lastIndexOf('/');
|
int lastSlash = path.lastIndexOf('/');
|
||||||
int start = hasWindowsDrive(path, true) ? 3 : 0;
|
int start = hasWindowsDrive(path) ? 3 : 0;
|
||||||
if ((path.length() == start) || // empty path
|
if ((path.length() == start) || // empty path
|
||||||
(lastSlash == start && path.length() == start+1)) { // at root
|
(lastSlash == start && path.length() == start+1)) { // at root
|
||||||
return null;
|
return null;
|
||||||
|
@ -250,7 +304,7 @@ public Path getParent() {
|
||||||
if (lastSlash==-1) {
|
if (lastSlash==-1) {
|
||||||
parent = CUR_DIR;
|
parent = CUR_DIR;
|
||||||
} else {
|
} else {
|
||||||
int end = hasWindowsDrive(path, true) ? 3 : 0;
|
int end = hasWindowsDrive(path) ? 3 : 0;
|
||||||
parent = path.substring(0, lastSlash==end?end+1:lastSlash);
|
parent = path.substring(0, lastSlash==end?end+1:lastSlash);
|
||||||
}
|
}
|
||||||
return new Path(uri.getScheme(), uri.getAuthority(), parent);
|
return new Path(uri.getScheme(), uri.getAuthority(), parent);
|
||||||
|
@ -277,7 +331,7 @@ public String toString() {
|
||||||
if (uri.getPath() != null) {
|
if (uri.getPath() != null) {
|
||||||
String path = uri.getPath();
|
String path = uri.getPath();
|
||||||
if (path.indexOf('/')==0 &&
|
if (path.indexOf('/')==0 &&
|
||||||
hasWindowsDrive(path, true) && // has windows drive
|
hasWindowsDrive(path) && // has windows drive
|
||||||
uri.getScheme() == null && // but no scheme
|
uri.getScheme() == null && // but no scheme
|
||||||
uri.getAuthority() == null) // or authority
|
uri.getAuthority() == null) // or authority
|
||||||
path = path.substring(1); // remove slash before drive
|
path = path.substring(1); // remove slash before drive
|
||||||
|
@ -364,7 +418,7 @@ public Path makeQualified(URI defaultUri, Path workingDir ) {
|
||||||
URI newUri = null;
|
URI newUri = null;
|
||||||
try {
|
try {
|
||||||
newUri = new URI(scheme, authority ,
|
newUri = new URI(scheme, authority ,
|
||||||
normalizePath(pathUri.getPath()), null, fragment);
|
normalizePath(scheme, pathUri.getPath()), null, fragment);
|
||||||
} catch (URISyntaxException e) {
|
} catch (URISyntaxException e) {
|
||||||
throw new IllegalArgumentException(e);
|
throw new IllegalArgumentException(e);
|
||||||
}
|
}
|
||||||
|
|
|
@ -504,7 +504,8 @@ private boolean isPermissionLoaded() {
|
||||||
|
|
||||||
RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
|
RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
|
||||||
super(f.length(), f.isDirectory(), 1, defaultBlockSize,
|
super(f.length(), f.isDirectory(), 1, defaultBlockSize,
|
||||||
f.lastModified(), fs.makeQualified(new Path(f.getPath())));
|
f.lastModified(), new Path(f.getPath()).makeQualified(fs.getUri(),
|
||||||
|
fs.getWorkingDirectory()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -535,9 +536,10 @@ public String getGroup() {
|
||||||
private void loadPermissionInfo() {
|
private void loadPermissionInfo() {
|
||||||
IOException e = null;
|
IOException e = null;
|
||||||
try {
|
try {
|
||||||
StringTokenizer t = new StringTokenizer(
|
String output = FileUtil.execCommand(new File(getPath().toUri()),
|
||||||
execCommand(new File(getPath().toUri()),
|
Shell.getGetPermissionCommand());
|
||||||
Shell.getGET_PERMISSION_COMMAND()));
|
StringTokenizer t =
|
||||||
|
new StringTokenizer(output, Shell.TOKEN_SEPARATOR_REGEX);
|
||||||
//expected format
|
//expected format
|
||||||
//-rw------- 1 username groupname ...
|
//-rw------- 1 username groupname ...
|
||||||
String permission = t.nextToken();
|
String permission = t.nextToken();
|
||||||
|
@ -546,7 +548,17 @@ private void loadPermissionInfo() {
|
||||||
}
|
}
|
||||||
setPermission(FsPermission.valueOf(permission));
|
setPermission(FsPermission.valueOf(permission));
|
||||||
t.nextToken();
|
t.nextToken();
|
||||||
setOwner(t.nextToken());
|
|
||||||
|
String owner = t.nextToken();
|
||||||
|
// If on windows domain, token format is DOMAIN\\user and we want to
|
||||||
|
// extract only the user name
|
||||||
|
if (Shell.WINDOWS) {
|
||||||
|
int i = owner.indexOf('\\');
|
||||||
|
if (i != -1)
|
||||||
|
owner = owner.substring(i + 1);
|
||||||
|
}
|
||||||
|
setOwner(owner);
|
||||||
|
|
||||||
setGroup(t.nextToken());
|
setGroup(t.nextToken());
|
||||||
} catch (Shell.ExitCodeException ioe) {
|
} catch (Shell.ExitCodeException ioe) {
|
||||||
if (ioe.getExitCode() != 1) {
|
if (ioe.getExitCode() != 1) {
|
||||||
|
@ -582,17 +594,7 @@ public void write(DataOutput out) throws IOException {
|
||||||
@Override
|
@Override
|
||||||
public void setOwner(Path p, String username, String groupname)
|
public void setOwner(Path p, String username, String groupname)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
if (username == null && groupname == null) {
|
FileUtil.setOwner(pathToFile(p), username, groupname);
|
||||||
throw new IOException("username == null && groupname == null");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (username == null) {
|
|
||||||
execCommand(pathToFile(p), Shell.SET_GROUP_COMMAND, groupname);
|
|
||||||
} else {
|
|
||||||
//OWNER[:[GROUP]]
|
|
||||||
String s = username + (groupname == null? "": ":" + groupname);
|
|
||||||
execCommand(pathToFile(p), Shell.SET_OWNER_COMMAND, s);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -602,11 +604,12 @@ public void setOwner(Path p, String username, String groupname)
|
||||||
public void setPermission(Path p, FsPermission permission)
|
public void setPermission(Path p, FsPermission permission)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
if (NativeIO.isAvailable()) {
|
if (NativeIO.isAvailable()) {
|
||||||
NativeIO.chmod(pathToFile(p).getCanonicalPath(),
|
NativeIO.POSIX.chmod(pathToFile(p).getCanonicalPath(),
|
||||||
permission.toShort());
|
permission.toShort());
|
||||||
} else {
|
} else {
|
||||||
execCommand(pathToFile(p), Shell.SET_PERMISSION_COMMAND,
|
String perm = String.format("%04o", permission.toShort());
|
||||||
String.format("%05o", permission.toShort()));
|
Shell.execCommand(Shell.getSetPermissionCommand(perm, false,
|
||||||
|
FileUtil.makeShellPath(pathToFile(p), true)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -92,7 +92,7 @@ public void initialize(Configuration conf, FileSystem fs, Path home) {
|
||||||
}
|
}
|
||||||
|
|
||||||
private Path makeTrashRelativePath(Path basePath, Path rmFilePath) {
|
private Path makeTrashRelativePath(Path basePath, Path rmFilePath) {
|
||||||
return new Path(basePath + rmFilePath.toUri().getPath());
|
return Path.mergePaths(basePath, rmFilePath);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -89,11 +89,9 @@ public void createSymlink(Path target, Path link, boolean createParent)
|
||||||
}
|
}
|
||||||
// NB: Use createSymbolicLink in java.nio.file.Path once available
|
// NB: Use createSymbolicLink in java.nio.file.Path once available
|
||||||
try {
|
try {
|
||||||
Shell.execCommand(Shell.LINK_COMMAND, "-s",
|
Shell.execCommand(Shell.getSymlinkCommand(
|
||||||
new URI(target.toString()).getPath(),
|
getPathWithoutSchemeAndAuthority(target),
|
||||||
new URI(link.toString()).getPath());
|
getPathWithoutSchemeAndAuthority(link)));
|
||||||
} catch (URISyntaxException x) {
|
|
||||||
throw new IOException("Invalid symlink path: "+x.getMessage());
|
|
||||||
} catch (IOException x) {
|
} catch (IOException x) {
|
||||||
throw new IOException("Unable to create symlink: "+x.getMessage());
|
throw new IOException("Unable to create symlink: "+x.getMessage());
|
||||||
}
|
}
|
||||||
|
@ -168,4 +166,13 @@ public Path getLinkTarget(Path f) throws IOException {
|
||||||
*/
|
*/
|
||||||
throw new AssertionError();
|
throw new AssertionError();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static String getPathWithoutSchemeAndAuthority(Path path) {
|
||||||
|
// This code depends on Path.toString() to remove the leading slash before
|
||||||
|
// the drive specification on Windows.
|
||||||
|
Path newPath = path.isUriPathAbsolute() ?
|
||||||
|
new Path(null, null, path.toUri().getPath()) :
|
||||||
|
path;
|
||||||
|
return newPath.toString();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,8 @@
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
|
import java.net.URI;
|
||||||
|
import java.net.URISyntaxException;
|
||||||
import java.util.LinkedList;
|
import java.util.LinkedList;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
|
@ -83,8 +85,12 @@ protected void setPreserve(boolean preserve) {
|
||||||
*/
|
*/
|
||||||
protected void getLocalDestination(LinkedList<String> args)
|
protected void getLocalDestination(LinkedList<String> args)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
try {
|
||||||
String pathString = (args.size() < 2) ? Path.CUR_DIR : args.removeLast();
|
String pathString = (args.size() < 2) ? Path.CUR_DIR : args.removeLast();
|
||||||
dst = new PathData(new File(pathString), getConf());
|
dst = new PathData(new URI(pathString), getConf());
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
throw new IOException("unexpected URISyntaxException", e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -20,6 +20,8 @@
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.net.URI;
|
||||||
|
import java.net.URISyntaxException;
|
||||||
import java.util.LinkedList;
|
import java.util.LinkedList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
@ -60,16 +62,20 @@ public static class Merge extends FsCommand {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void processOptions(LinkedList<String> args) throws IOException {
|
protected void processOptions(LinkedList<String> args) throws IOException {
|
||||||
|
try {
|
||||||
CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "nl");
|
CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "nl");
|
||||||
cf.parse(args);
|
cf.parse(args);
|
||||||
|
|
||||||
delimiter = cf.getOpt("nl") ? "\n" : null;
|
delimiter = cf.getOpt("nl") ? "\n" : null;
|
||||||
|
|
||||||
dst = new PathData(new File(args.removeLast()), getConf());
|
dst = new PathData(new URI(args.removeLast()), getConf());
|
||||||
if (dst.exists && dst.stat.isDirectory()) {
|
if (dst.exists && dst.stat.isDirectory()) {
|
||||||
throw new PathIsDirectoryException(dst.toString());
|
throw new PathIsDirectoryException(dst.toString());
|
||||||
}
|
}
|
||||||
srcs = new LinkedList<PathData>();
|
srcs = new LinkedList<PathData>();
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
throw new IOException("unexpected URISyntaxException", e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -197,9 +203,13 @@ protected void processOptions(LinkedList<String> args) throws IOException {
|
||||||
// commands operating on local paths have no need for glob expansion
|
// commands operating on local paths have no need for glob expansion
|
||||||
@Override
|
@Override
|
||||||
protected List<PathData> expandArgument(String arg) throws IOException {
|
protected List<PathData> expandArgument(String arg) throws IOException {
|
||||||
|
try {
|
||||||
List<PathData> items = new LinkedList<PathData>();
|
List<PathData> items = new LinkedList<PathData>();
|
||||||
items.add(new PathData(new File(arg), getConf()));
|
items.add(new PathData(new URI(arg), getConf()));
|
||||||
return items;
|
return items;
|
||||||
|
} catch (URISyntaxException e) {
|
||||||
|
throw new IOException("unexpected URISyntaxException", e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -24,6 +24,7 @@
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.net.URISyntaxException;
|
import java.net.URISyntaxException;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import org.apache.hadoop.classification.InterfaceAudience;
|
import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
import org.apache.hadoop.classification.InterfaceStability;
|
import org.apache.hadoop.classification.InterfaceStability;
|
||||||
|
@ -39,6 +40,9 @@
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs).
|
* Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs).
|
||||||
|
* PathData ensures that the returned path string will be the same as the
|
||||||
|
* one passed in during initialization (unlike Path objects which can
|
||||||
|
* modify the path string).
|
||||||
* The stat field will be null if the path does not exist.
|
* The stat field will be null if the path does not exist.
|
||||||
*/
|
*/
|
||||||
@InterfaceAudience.Private
|
@InterfaceAudience.Private
|
||||||
|
@ -51,6 +55,20 @@ public class PathData implements Comparable<PathData> {
|
||||||
public FileStatus stat;
|
public FileStatus stat;
|
||||||
public boolean exists;
|
public boolean exists;
|
||||||
|
|
||||||
|
/* True if the URI scheme was not present in the pathString but inferred.
|
||||||
|
*/
|
||||||
|
private boolean inferredSchemeFromPath = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pre-compiled regular expressions to detect path formats.
|
||||||
|
*/
|
||||||
|
private static final Pattern potentialUri =
|
||||||
|
Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
|
||||||
|
private static final Pattern windowsNonUriAbsolutePath1 =
|
||||||
|
Pattern.compile("^/?[a-zA-Z]:\\\\");
|
||||||
|
private static final Pattern windowsNonUriAbsolutePath2 =
|
||||||
|
Pattern.compile("^/?[a-zA-Z]:/");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates an object to wrap the given parameters as fields. The string
|
* Creates an object to wrap the given parameters as fields. The string
|
||||||
* used to create the path will be recorded since the Path object does not
|
* used to create the path will be recorded since the Path object does not
|
||||||
|
@ -67,12 +85,12 @@ public PathData(String pathString, Configuration conf) throws IOException {
|
||||||
* Creates an object to wrap the given parameters as fields. The string
|
* Creates an object to wrap the given parameters as fields. The string
|
||||||
* used to create the path will be recorded since the Path object does not
|
* used to create the path will be recorded since the Path object does not
|
||||||
* return exactly the same string used to initialize it
|
* return exactly the same string used to initialize it
|
||||||
* @param localPath a local File
|
* @param localPath a local URI
|
||||||
* @param conf the configuration file
|
* @param conf the configuration file
|
||||||
* @throws IOException if anything goes wrong...
|
* @throws IOException if anything goes wrong...
|
||||||
*/
|
*/
|
||||||
public PathData(File localPath, Configuration conf) throws IOException {
|
public PathData(URI localPath, Configuration conf) throws IOException {
|
||||||
this(FileSystem.getLocal(conf), localPath.toString());
|
this(FileSystem.getLocal(conf), localPath.getPath());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -86,6 +104,39 @@ private PathData(FileSystem fs, String pathString) throws IOException {
|
||||||
this(fs, pathString, lookupStat(fs, pathString, true));
|
this(fs, pathString, lookupStat(fs, pathString, true));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates the given Windows path.
|
||||||
|
* Throws IOException on failure.
|
||||||
|
* @param pathString a String of the path suppliued by the user.
|
||||||
|
*/
|
||||||
|
private void ValidateWindowsPath(String pathString)
|
||||||
|
throws IOException
|
||||||
|
{
|
||||||
|
if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
|
||||||
|
// Forward slashes disallowed in a backslash-separated path.
|
||||||
|
if (pathString.indexOf('/') != -1) {
|
||||||
|
throw new IOException("Invalid path string " + pathString);
|
||||||
|
}
|
||||||
|
|
||||||
|
inferredSchemeFromPath = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is it a forward slash-separated absolute path?
|
||||||
|
if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
|
||||||
|
inferredSchemeFromPath = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Does it look like a URI? If so then just leave it alone.
|
||||||
|
if (potentialUri.matcher(pathString).find()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Looks like a relative path on Windows.
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates an object to wrap the given parameters as fields. The string
|
* Creates an object to wrap the given parameters as fields. The string
|
||||||
* used to create the path will be recorded since the Path object does not
|
* used to create the path will be recorded since the Path object does not
|
||||||
|
@ -100,6 +151,10 @@ private PathData(FileSystem fs, String pathString, FileStatus stat)
|
||||||
this.uri = stringToUri(pathString);
|
this.uri = stringToUri(pathString);
|
||||||
this.path = fs.makeQualified(new Path(uri));
|
this.path = fs.makeQualified(new Path(uri));
|
||||||
setStat(stat);
|
setStat(stat);
|
||||||
|
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
ValidateWindowsPath(pathString);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// need a static method for the ctor above
|
// need a static method for the ctor above
|
||||||
|
@ -236,7 +291,7 @@ public PathData getPathDataForChild(PathData child) throws IOException {
|
||||||
* Given a child of this directory, use the directory's path and the child's
|
* Given a child of this directory, use the directory's path and the child's
|
||||||
* basename to construct the string to the child. This preserves relative
|
* basename to construct the string to the child. This preserves relative
|
||||||
* paths since Path will fully qualify.
|
* paths since Path will fully qualify.
|
||||||
* @param child a path contained within this directory
|
* @param childPath a path contained within this directory
|
||||||
* @return String of the path relative to this directory
|
* @return String of the path relative to this directory
|
||||||
*/
|
*/
|
||||||
private String getStringForChildPath(Path childPath) {
|
private String getStringForChildPath(Path childPath) {
|
||||||
|
@ -386,7 +441,14 @@ public String toString() {
|
||||||
// No interpretation of symbols. Just decode % escaped chars.
|
// No interpretation of symbols. Just decode % escaped chars.
|
||||||
String decodedRemainder = uri.getSchemeSpecificPart();
|
String decodedRemainder = uri.getSchemeSpecificPart();
|
||||||
|
|
||||||
if (scheme == null) {
|
// Drop the scheme if it was inferred to ensure fidelity between
|
||||||
|
// the input and output path strings.
|
||||||
|
if ((scheme == null) || (inferredSchemeFromPath)) {
|
||||||
|
if (Path.isWindowsAbsolutePath(decodedRemainder, true)) {
|
||||||
|
// Strip the leading '/' added in stringToUri so users see a valid
|
||||||
|
// Windows path.
|
||||||
|
decodedRemainder = decodedRemainder.substring(1);
|
||||||
|
}
|
||||||
return decodedRemainder;
|
return decodedRemainder;
|
||||||
} else {
|
} else {
|
||||||
StringBuilder buffer = new StringBuilder();
|
StringBuilder buffer = new StringBuilder();
|
||||||
|
@ -409,13 +471,56 @@ public File toFile() {
|
||||||
return ((LocalFileSystem)fs).pathToFile(path);
|
return ((LocalFileSystem)fs).pathToFile(path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Normalize the given Windows path string. This does the following:
|
||||||
|
* 1. Adds "file:" scheme for absolute paths.
|
||||||
|
* 2. Ensures the scheme-specific part starts with '/' per RFC2396.
|
||||||
|
* 3. Replaces backslash path separators with forward slashes.
|
||||||
|
* @param pathString Path string supplied by the user.
|
||||||
|
* @return normalized absolute path string. Returns the input string
|
||||||
|
* if it is not a Windows absolute path.
|
||||||
|
*/
|
||||||
|
private static String normalizeWindowsPath(String pathString)
|
||||||
|
throws IOException
|
||||||
|
{
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return pathString;
|
||||||
|
}
|
||||||
|
|
||||||
|
boolean slashed =
|
||||||
|
((pathString.length() >= 1) && (pathString.charAt(0) == '/'));
|
||||||
|
|
||||||
|
// Is it a backslash-separated absolute path?
|
||||||
|
if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
|
||||||
|
// Forward slashes disallowed in a backslash-separated path.
|
||||||
|
if (pathString.indexOf('/') != -1) {
|
||||||
|
throw new IOException("Invalid path string " + pathString);
|
||||||
|
}
|
||||||
|
|
||||||
|
pathString = pathString.replace('\\', '/');
|
||||||
|
return "file:" + (slashed ? "" : "/") + pathString;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is it a forward slash-separated absolute path?
|
||||||
|
if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
|
||||||
|
return "file:" + (slashed ? "" : "/") + pathString;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is it a backslash-separated relative file path (no scheme and
|
||||||
|
// no drive-letter specifier)?
|
||||||
|
if ((pathString.indexOf(':') == -1) && (pathString.indexOf('\\') != -1)) {
|
||||||
|
pathString = pathString.replace('\\', '/');
|
||||||
|
}
|
||||||
|
|
||||||
|
return pathString;
|
||||||
|
}
|
||||||
|
|
||||||
/** Construct a URI from a String with unescaped special characters
|
/** Construct a URI from a String with unescaped special characters
|
||||||
* that have non-standard sematics. e.g. /, ?, #. A custom parsing
|
* that have non-standard semantics. e.g. /, ?, #. A custom parsing
|
||||||
* is needed to prevent misbihaviors.
|
* is needed to prevent misbehavior.
|
||||||
* @param pathString The input path in string form
|
* @param pathString The input path in string form
|
||||||
* @return URI
|
* @return URI
|
||||||
*/
|
*/
|
||||||
private static URI stringToUri(String pathString) {
|
private static URI stringToUri(String pathString) throws IOException {
|
||||||
// We can't use 'new URI(String)' directly. Since it doesn't do quoting
|
// We can't use 'new URI(String)' directly. Since it doesn't do quoting
|
||||||
// internally, the internal parser may fail or break the string at wrong
|
// internally, the internal parser may fail or break the string at wrong
|
||||||
// places. Use of multi-argument ctors will quote those chars for us,
|
// places. Use of multi-argument ctors will quote those chars for us,
|
||||||
|
@ -424,9 +529,10 @@ private static URI stringToUri(String pathString) {
|
||||||
// parse uri components
|
// parse uri components
|
||||||
String scheme = null;
|
String scheme = null;
|
||||||
String authority = null;
|
String authority = null;
|
||||||
|
|
||||||
int start = 0;
|
int start = 0;
|
||||||
|
|
||||||
|
pathString = normalizeWindowsPath(pathString);
|
||||||
|
|
||||||
// parse uri scheme, if any
|
// parse uri scheme, if any
|
||||||
int colon = pathString.indexOf(':');
|
int colon = pathString.indexOf(':');
|
||||||
int slash = pathString.indexOf('/');
|
int slash = pathString.indexOf('/');
|
||||||
|
@ -445,8 +551,7 @@ private static URI stringToUri(String pathString) {
|
||||||
authority = pathString.substring(start, authEnd);
|
authority = pathString.substring(start, authEnd);
|
||||||
start = authEnd;
|
start = authEnd;
|
||||||
}
|
}
|
||||||
|
// uri path is the rest of the string. ? or # are not interpreted,
|
||||||
// uri path is the rest of the string. ? or # are not interpreated,
|
|
||||||
// but any occurrence of them will be quoted by the URI ctor.
|
// but any occurrence of them will be quoted by the URI ctor.
|
||||||
String path = pathString.substring(start, pathString.length());
|
String path = pathString.substring(start, pathString.length());
|
||||||
|
|
||||||
|
|
|
@ -60,6 +60,7 @@
|
||||||
import org.apache.hadoop.security.authorize.AccessControlList;
|
import org.apache.hadoop.security.authorize.AccessControlList;
|
||||||
import org.apache.hadoop.security.ssl.SSLFactory;
|
import org.apache.hadoop.security.ssl.SSLFactory;
|
||||||
import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.util.ReflectionUtils;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
import org.mortbay.io.Buffer;
|
import org.mortbay.io.Buffer;
|
||||||
import org.mortbay.jetty.Connector;
|
import org.mortbay.jetty.Connector;
|
||||||
import org.mortbay.jetty.Handler;
|
import org.mortbay.jetty.Handler;
|
||||||
|
@ -304,6 +305,13 @@ public static Connector createDefaultChannelConnector() {
|
||||||
ret.setAcceptQueueSize(128);
|
ret.setAcceptQueueSize(128);
|
||||||
ret.setResolveNames(false);
|
ret.setResolveNames(false);
|
||||||
ret.setUseDirectBuffers(false);
|
ret.setUseDirectBuffers(false);
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
// result of setting the SO_REUSEADDR flag is different on Windows
|
||||||
|
// http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
|
||||||
|
// without this 2 NN's can start on the same machine and listen on
|
||||||
|
// the same port with indeterminate routing of incoming requests to them
|
||||||
|
ret.setReuseAddress(false);
|
||||||
|
}
|
||||||
ret.setHeaderBufferSize(1024*64);
|
ret.setHeaderBufferSize(1024*64);
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
|
@ -203,8 +203,8 @@ public void run() {
|
||||||
// It's also possible that we'll end up requesting readahead on some
|
// It's also possible that we'll end up requesting readahead on some
|
||||||
// other FD, which may be wasted work, but won't cause a problem.
|
// other FD, which may be wasted work, but won't cause a problem.
|
||||||
try {
|
try {
|
||||||
NativeIO.posixFadviseIfPossible(fd, off, len,
|
NativeIO.POSIX.posixFadviseIfPossible(fd, off, len,
|
||||||
NativeIO.POSIX_FADV_WILLNEED);
|
NativeIO.POSIX.POSIX_FADV_WILLNEED);
|
||||||
} catch (IOException ioe) {
|
} catch (IOException ioe) {
|
||||||
if (canceled) {
|
if (canceled) {
|
||||||
// no big deal - the reader canceled the request and closed
|
// no big deal - the reader canceled the request and closed
|
||||||
|
|
|
@ -23,6 +23,7 @@
|
||||||
import java.io.FileOutputStream;
|
import java.io.FileOutputStream;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.RandomAccessFile;
|
import java.io.RandomAccessFile;
|
||||||
|
import java.util.Arrays;
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FSDataInputStream;
|
import org.apache.hadoop.fs.FSDataInputStream;
|
||||||
|
@ -32,7 +33,7 @@
|
||||||
import org.apache.hadoop.io.nativeio.Errno;
|
import org.apache.hadoop.io.nativeio.Errno;
|
||||||
import org.apache.hadoop.io.nativeio.NativeIO;
|
import org.apache.hadoop.io.nativeio.NativeIO;
|
||||||
import org.apache.hadoop.io.nativeio.NativeIOException;
|
import org.apache.hadoop.io.nativeio.NativeIOException;
|
||||||
import org.apache.hadoop.io.nativeio.NativeIO.Stat;
|
import org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat;
|
||||||
import org.apache.hadoop.security.UserGroupInformation;
|
import org.apache.hadoop.security.UserGroupInformation;
|
||||||
|
|
||||||
import com.google.common.annotations.VisibleForTesting;
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
|
@ -127,7 +128,7 @@ protected static RandomAccessFile forceSecureOpenForRandomRead(File f,
|
||||||
RandomAccessFile raf = new RandomAccessFile(f, mode);
|
RandomAccessFile raf = new RandomAccessFile(f, mode);
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
try {
|
try {
|
||||||
Stat stat = NativeIO.getFstat(raf.getFD());
|
Stat stat = NativeIO.POSIX.getFstat(raf.getFD());
|
||||||
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
|
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
|
||||||
expectedGroup);
|
expectedGroup);
|
||||||
success = true;
|
success = true;
|
||||||
|
@ -169,7 +170,7 @@ protected static FSDataInputStream forceSecureOpenFSDataInputStream(
|
||||||
rawFilesystem.open(new Path(file.getAbsolutePath()));
|
rawFilesystem.open(new Path(file.getAbsolutePath()));
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
try {
|
try {
|
||||||
Stat stat = NativeIO.getFstat(in.getFileDescriptor());
|
Stat stat = NativeIO.POSIX.getFstat(in.getFileDescriptor());
|
||||||
checkStat(file, stat.getOwner(), stat.getGroup(), expectedOwner,
|
checkStat(file, stat.getOwner(), stat.getGroup(), expectedOwner,
|
||||||
expectedGroup);
|
expectedGroup);
|
||||||
success = true;
|
success = true;
|
||||||
|
@ -214,7 +215,7 @@ protected static FileInputStream forceSecureOpenForRead(File f, String expectedO
|
||||||
FileInputStream fis = new FileInputStream(f);
|
FileInputStream fis = new FileInputStream(f);
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
try {
|
try {
|
||||||
Stat stat = NativeIO.getFstat(fis.getFD());
|
Stat stat = NativeIO.POSIX.getFstat(fis.getFD());
|
||||||
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
|
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
|
||||||
expectedGroup);
|
expectedGroup);
|
||||||
success = true;
|
success = true;
|
||||||
|
@ -260,36 +261,31 @@ public static FileOutputStream createForWrite(File f, int permissions)
|
||||||
if (skipSecurity) {
|
if (skipSecurity) {
|
||||||
return insecureCreateForWrite(f, permissions);
|
return insecureCreateForWrite(f, permissions);
|
||||||
} else {
|
} else {
|
||||||
// Use the native wrapper around open(2)
|
return NativeIO.getCreateForWriteFileOutputStream(f, permissions);
|
||||||
try {
|
|
||||||
FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
|
|
||||||
NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
|
|
||||||
permissions);
|
|
||||||
return new FileOutputStream(fd);
|
|
||||||
} catch (NativeIOException nioe) {
|
|
||||||
if (nioe.getErrno() == Errno.EEXIST) {
|
|
||||||
throw new AlreadyExistsException(nioe);
|
|
||||||
}
|
|
||||||
throw nioe;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void checkStat(File f, String owner, String group,
|
private static void checkStat(File f, String owner, String group,
|
||||||
String expectedOwner,
|
String expectedOwner,
|
||||||
String expectedGroup) throws IOException {
|
String expectedGroup) throws IOException {
|
||||||
|
boolean success = true;
|
||||||
if (expectedOwner != null &&
|
if (expectedOwner != null &&
|
||||||
!expectedOwner.equals(owner)) {
|
!expectedOwner.equals(owner)) {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
UserGroupInformation ugi =
|
||||||
|
UserGroupInformation.createRemoteUser(expectedOwner);
|
||||||
|
final String adminsGroupString = "Administrators";
|
||||||
|
success = owner.equals(adminsGroupString)
|
||||||
|
&& Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString);
|
||||||
|
} else {
|
||||||
|
success = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!success) {
|
||||||
throw new IOException(
|
throw new IOException(
|
||||||
"Owner '" + owner + "' for path " + f + " did not match " +
|
"Owner '" + owner + "' for path " + f + " did not match " +
|
||||||
"expected owner '" + expectedOwner + "'");
|
"expected owner '" + expectedOwner + "'");
|
||||||
}
|
}
|
||||||
if (expectedGroup != null &&
|
|
||||||
!expectedGroup.equals(group)) {
|
|
||||||
throw new IOException(
|
|
||||||
"Group '" + group + "' for path " + f + " did not match " +
|
|
||||||
"expected group '" + expectedGroup + "'");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -19,7 +19,10 @@
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.FileDescriptor;
|
import java.io.FileDescriptor;
|
||||||
|
import java.io.FileInputStream;
|
||||||
|
import java.io.FileOutputStream;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.io.RandomAccessFile;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
|
||||||
|
@ -27,10 +30,13 @@
|
||||||
import org.apache.hadoop.classification.InterfaceStability;
|
import org.apache.hadoop.classification.InterfaceStability;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
||||||
|
import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
|
||||||
import org.apache.hadoop.util.NativeCodeLoader;
|
import org.apache.hadoop.util.NativeCodeLoader;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* JNI wrappers for various native IO-related calls not available in Java.
|
* JNI wrappers for various native IO-related calls not available in Java.
|
||||||
* These functions should generally be used alongside a fallback to another
|
* These functions should generally be used alongside a fallback to another
|
||||||
|
@ -39,6 +45,7 @@
|
||||||
@InterfaceAudience.Private
|
@InterfaceAudience.Private
|
||||||
@InterfaceStability.Unstable
|
@InterfaceStability.Unstable
|
||||||
public class NativeIO {
|
public class NativeIO {
|
||||||
|
public static class POSIX {
|
||||||
// Flags for open() call from bits/fcntl.h
|
// Flags for open() call from bits/fcntl.h
|
||||||
public static final int O_RDONLY = 00;
|
public static final int O_RDONLY = 00;
|
||||||
public static final int O_WRONLY = 01;
|
public static final int O_WRONLY = 01;
|
||||||
|
@ -86,7 +93,6 @@ public class NativeIO {
|
||||||
private static final Log LOG = LogFactory.getLog(NativeIO.class);
|
private static final Log LOG = LogFactory.getLog(NativeIO.class);
|
||||||
|
|
||||||
private static boolean nativeLoaded = false;
|
private static boolean nativeLoaded = false;
|
||||||
private static boolean workaroundNonThreadSafePasswdCalls = false;
|
|
||||||
private static boolean fadvisePossible = true;
|
private static boolean fadvisePossible = true;
|
||||||
private static boolean syncFileRangePossible = true;
|
private static boolean syncFileRangePossible = true;
|
||||||
|
|
||||||
|
@ -134,8 +140,28 @@ public static boolean isAvailable() {
|
||||||
public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
|
public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
|
||||||
/** Wrapper around fstat(2) */
|
/** Wrapper around fstat(2) */
|
||||||
private static native Stat fstat(FileDescriptor fd) throws IOException;
|
private static native Stat fstat(FileDescriptor fd) throws IOException;
|
||||||
/** Wrapper around chmod(2) */
|
|
||||||
public static native void chmod(String path, int mode) throws IOException;
|
/** Native chmod implementation. On UNIX, it is a wrapper around chmod(2) */
|
||||||
|
private static native void chmodImpl(String path, int mode) throws IOException;
|
||||||
|
|
||||||
|
public static void chmod(String path, int mode) throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
chmodImpl(path, mode);
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
chmodImpl(path, mode);
|
||||||
|
} catch (NativeIOException nioe) {
|
||||||
|
if (nioe.getErrorCode() == 3) {
|
||||||
|
throw new NativeIOException("No such file or directory",
|
||||||
|
Errno.ENOENT);
|
||||||
|
} else {
|
||||||
|
LOG.warn(String.format("NativeIO.chmod error (%d): %s",
|
||||||
|
nioe.getErrorCode(), nioe.getMessage()));
|
||||||
|
throw new NativeIOException("Unknown error", Errno.UNKNOWN);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/** Wrapper around posix_fadvise(2) */
|
/** Wrapper around posix_fadvise(2) */
|
||||||
static native void posix_fadvise(
|
static native void posix_fadvise(
|
||||||
|
@ -145,9 +171,6 @@ static native void posix_fadvise(
|
||||||
static native void sync_file_range(
|
static native void sync_file_range(
|
||||||
FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
|
FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
|
||||||
|
|
||||||
/** Initialize the JNI method ID and class ID cache */
|
|
||||||
private static native void initNative();
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Call posix_fadvise on the given file descriptor. See the manpage
|
* Call posix_fadvise on the given file descriptor. See the manpage
|
||||||
* for this syscall for more information. On systems where this
|
* for this syscall for more information. On systems where this
|
||||||
|
@ -190,6 +213,10 @@ public static void syncFileRangeIfPossible(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Linux only methods used for getOwner() implementation */
|
||||||
|
private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
|
||||||
|
private static native String getUserName(long uid) throws IOException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Result type of the fstat call
|
* Result type of the fstat call
|
||||||
*/
|
*/
|
||||||
|
@ -238,27 +265,19 @@ public int getMode() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static native String getUserName(int uid) throws IOException;
|
/**
|
||||||
|
* Returns the file stat for a file descriptor.
|
||||||
static native String getGroupName(int uid) throws IOException;
|
*
|
||||||
|
* @param fd file descriptor.
|
||||||
private static class CachedName {
|
* @return the file descriptor file stat.
|
||||||
final long timestamp;
|
* @throws IOException thrown if there was an IO error while obtaining the file stat.
|
||||||
final String name;
|
*/
|
||||||
|
public static Stat getFstat(FileDescriptor fd) throws IOException {
|
||||||
public CachedName(String name, long timestamp) {
|
Stat stat = fstat(fd);
|
||||||
this.name = name;
|
stat.owner = getName(IdCache.USER, stat.ownerId);
|
||||||
this.timestamp = timestamp;
|
stat.group = getName(IdCache.GROUP, stat.groupId);
|
||||||
|
return stat;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
|
|
||||||
new ConcurrentHashMap<Integer, CachedName>();
|
|
||||||
|
|
||||||
private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
|
|
||||||
new ConcurrentHashMap<Integer, CachedName>();
|
|
||||||
|
|
||||||
private enum IdCache { USER, GROUP }
|
|
||||||
|
|
||||||
private static String getName(IdCache domain, int id) throws IOException {
|
private static String getName(IdCache domain, int id) throws IOException {
|
||||||
Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
|
Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
|
||||||
|
@ -281,18 +300,257 @@ private static String getName(IdCache domain, int id) throws IOException {
|
||||||
return name;
|
return name;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static native String getUserName(int uid) throws IOException;
|
||||||
|
static native String getGroupName(int uid) throws IOException;
|
||||||
|
|
||||||
|
private static class CachedName {
|
||||||
|
final long timestamp;
|
||||||
|
final String name;
|
||||||
|
|
||||||
|
public CachedName(String name, long timestamp) {
|
||||||
|
this.name = name;
|
||||||
|
this.timestamp = timestamp;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
|
||||||
|
new ConcurrentHashMap<Integer, CachedName>();
|
||||||
|
|
||||||
|
private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
|
||||||
|
new ConcurrentHashMap<Integer, CachedName>();
|
||||||
|
|
||||||
|
private enum IdCache { USER, GROUP }
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean workaroundNonThreadSafePasswdCalls = false;
|
||||||
|
|
||||||
|
|
||||||
|
public static class Windows {
|
||||||
|
// Flags for CreateFile() call on Windows
|
||||||
|
public static final long GENERIC_READ = 0x80000000L;
|
||||||
|
public static final long GENERIC_WRITE = 0x40000000L;
|
||||||
|
|
||||||
|
public static final long FILE_SHARE_READ = 0x00000001L;
|
||||||
|
public static final long FILE_SHARE_WRITE = 0x00000002L;
|
||||||
|
public static final long FILE_SHARE_DELETE = 0x00000004L;
|
||||||
|
|
||||||
|
public static final long CREATE_NEW = 1;
|
||||||
|
public static final long CREATE_ALWAYS = 2;
|
||||||
|
public static final long OPEN_EXISTING = 3;
|
||||||
|
public static final long OPEN_ALWAYS = 4;
|
||||||
|
public static final long TRUNCATE_EXISTING = 5;
|
||||||
|
|
||||||
|
public static final long FILE_BEGIN = 0;
|
||||||
|
public static final long FILE_CURRENT = 1;
|
||||||
|
public static final long FILE_END = 2;
|
||||||
|
|
||||||
|
/** Wrapper around CreateFile() on Windows */
|
||||||
|
public static native FileDescriptor createFile(String path,
|
||||||
|
long desiredAccess, long shareMode, long creationDisposition)
|
||||||
|
throws IOException;
|
||||||
|
|
||||||
|
/** Wrapper around SetFilePointer() on Windows */
|
||||||
|
public static native long setFilePointer(FileDescriptor fd,
|
||||||
|
long distanceToMove, long moveMethod) throws IOException;
|
||||||
|
|
||||||
|
/** Windows only methods used for getOwner() implementation */
|
||||||
|
private static native String getOwner(FileDescriptor fd) throws IOException;
|
||||||
|
|
||||||
|
static {
|
||||||
|
if (NativeCodeLoader.isNativeCodeLoaded()) {
|
||||||
|
try {
|
||||||
|
initNative();
|
||||||
|
nativeLoaded = true;
|
||||||
|
} catch (Throwable t) {
|
||||||
|
// This can happen if the user has an older version of libhadoop.so
|
||||||
|
// installed - in this case we can continue without native IO
|
||||||
|
// after warning
|
||||||
|
LOG.error("Unable to initialize NativeIO libraries", t);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final Log LOG = LogFactory.getLog(NativeIO.class);
|
||||||
|
|
||||||
|
private static boolean nativeLoaded = false;
|
||||||
|
|
||||||
|
static {
|
||||||
|
if (NativeCodeLoader.isNativeCodeLoaded()) {
|
||||||
|
try {
|
||||||
|
initNative();
|
||||||
|
nativeLoaded = true;
|
||||||
|
} catch (Throwable t) {
|
||||||
|
// This can happen if the user has an older version of libhadoop.so
|
||||||
|
// installed - in this case we can continue without native IO
|
||||||
|
// after warning
|
||||||
|
LOG.error("Unable to initialize NativeIO libraries", t);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the file stat for a file descriptor.
|
* Return true if the JNI-based native IO extensions are available.
|
||||||
*
|
|
||||||
* @param fd file descriptor.
|
|
||||||
* @return the file descriptor file stat.
|
|
||||||
* @throws IOException thrown if there was an IO error while obtaining the file stat.
|
|
||||||
*/
|
*/
|
||||||
public static Stat getFstat(FileDescriptor fd) throws IOException {
|
public static boolean isAvailable() {
|
||||||
Stat stat = fstat(fd);
|
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
|
||||||
stat.owner = getName(IdCache.USER, stat.ownerId);
|
}
|
||||||
stat.group = getName(IdCache.GROUP, stat.groupId);
|
|
||||||
return stat;
|
/** Initialize the JNI method ID and class ID cache */
|
||||||
|
private static native void initNative();
|
||||||
|
|
||||||
|
private static class CachedUid {
|
||||||
|
final long timestamp;
|
||||||
|
final String username;
|
||||||
|
public CachedUid(String username, long timestamp) {
|
||||||
|
this.timestamp = timestamp;
|
||||||
|
this.username = username;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
private static final Map<Long, CachedUid> uidCache =
|
||||||
|
new ConcurrentHashMap<Long, CachedUid>();
|
||||||
|
private static long cacheTimeout;
|
||||||
|
private static boolean initialized = false;
|
||||||
|
|
||||||
|
public static String getOwner(FileDescriptor fd) throws IOException {
|
||||||
|
ensureInitialized();
|
||||||
|
if (Shell.WINDOWS) {
|
||||||
|
String owner = Windows.getOwner(fd);
|
||||||
|
int i = owner.indexOf('\\');
|
||||||
|
if (i != -1)
|
||||||
|
owner = owner.substring(i + 1);
|
||||||
|
return owner;
|
||||||
|
} else {
|
||||||
|
long uid = POSIX.getUIDforFDOwnerforOwner(fd);
|
||||||
|
CachedUid cUid = uidCache.get(uid);
|
||||||
|
long now = System.currentTimeMillis();
|
||||||
|
if (cUid != null && (cUid.timestamp + cacheTimeout) > now) {
|
||||||
|
return cUid.username;
|
||||||
|
}
|
||||||
|
String user = POSIX.getUserName(uid);
|
||||||
|
LOG.info("Got UserName " + user + " for UID " + uid
|
||||||
|
+ " from the native implementation");
|
||||||
|
cUid = new CachedUid(user, now);
|
||||||
|
uidCache.put(uid, cUid);
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a FileInputStream that shares delete permission on the
|
||||||
|
* file opened, i.e. other process can delete the file the
|
||||||
|
* FileInputStream is reading. Only Windows implementation uses
|
||||||
|
* the native interface.
|
||||||
|
*/
|
||||||
|
public static FileInputStream getShareDeleteFileInputStream(File f)
|
||||||
|
throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// On Linux the default FileInputStream shares delete permission
|
||||||
|
// on the file opened.
|
||||||
|
//
|
||||||
|
return new FileInputStream(f);
|
||||||
|
} else {
|
||||||
|
// Use Windows native interface to create a FileInputStream that
|
||||||
|
// shares delete permission on the file opened.
|
||||||
|
//
|
||||||
|
FileDescriptor fd = Windows.createFile(
|
||||||
|
f.getAbsolutePath(),
|
||||||
|
Windows.GENERIC_READ,
|
||||||
|
Windows.FILE_SHARE_READ |
|
||||||
|
Windows.FILE_SHARE_WRITE |
|
||||||
|
Windows.FILE_SHARE_DELETE,
|
||||||
|
Windows.OPEN_EXISTING);
|
||||||
|
return new FileInputStream(fd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a FileInputStream that shares delete permission on the
|
||||||
|
* file opened at a given offset, i.e. other process can delete
|
||||||
|
* the file the FileInputStream is reading. Only Windows implementation
|
||||||
|
* uses the native interface.
|
||||||
|
*/
|
||||||
|
public static FileInputStream getShareDeleteFileInputStream(File f, long seekOffset)
|
||||||
|
throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
RandomAccessFile rf = new RandomAccessFile(f, "r");
|
||||||
|
if (seekOffset > 0) {
|
||||||
|
rf.seek(seekOffset);
|
||||||
|
}
|
||||||
|
return new FileInputStream(rf.getFD());
|
||||||
|
} else {
|
||||||
|
// Use Windows native interface to create a FileInputStream that
|
||||||
|
// shares delete permission on the file opened, and set it to the
|
||||||
|
// given offset.
|
||||||
|
//
|
||||||
|
FileDescriptor fd = NativeIO.Windows.createFile(
|
||||||
|
f.getAbsolutePath(),
|
||||||
|
NativeIO.Windows.GENERIC_READ,
|
||||||
|
NativeIO.Windows.FILE_SHARE_READ |
|
||||||
|
NativeIO.Windows.FILE_SHARE_WRITE |
|
||||||
|
NativeIO.Windows.FILE_SHARE_DELETE,
|
||||||
|
NativeIO.Windows.OPEN_EXISTING);
|
||||||
|
if (seekOffset > 0)
|
||||||
|
NativeIO.Windows.setFilePointer(fd, seekOffset, NativeIO.Windows.FILE_BEGIN);
|
||||||
|
return new FileInputStream(fd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create the specified File for write access, ensuring that it does not exist.
|
||||||
|
* @param f the file that we want to create
|
||||||
|
* @param permissions we want to have on the file (if security is enabled)
|
||||||
|
*
|
||||||
|
* @throws AlreadyExistsException if the file already exists
|
||||||
|
* @throws IOException if any other error occurred
|
||||||
|
*/
|
||||||
|
public static FileOutputStream getCreateForWriteFileOutputStream(File f, int permissions)
|
||||||
|
throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Use the native wrapper around open(2)
|
||||||
|
try {
|
||||||
|
FileDescriptor fd = NativeIO.POSIX.open(f.getAbsolutePath(),
|
||||||
|
NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT
|
||||||
|
| NativeIO.POSIX.O_EXCL, permissions);
|
||||||
|
return new FileOutputStream(fd);
|
||||||
|
} catch (NativeIOException nioe) {
|
||||||
|
if (nioe.getErrno() == Errno.EEXIST) {
|
||||||
|
throw new AlreadyExistsException(nioe);
|
||||||
|
}
|
||||||
|
throw nioe;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Use the Windows native APIs to create equivalent FileOutputStream
|
||||||
|
try {
|
||||||
|
FileDescriptor fd = NativeIO.Windows.createFile(f.getCanonicalPath(),
|
||||||
|
NativeIO.Windows.GENERIC_WRITE,
|
||||||
|
NativeIO.Windows.FILE_SHARE_DELETE
|
||||||
|
| NativeIO.Windows.FILE_SHARE_READ
|
||||||
|
| NativeIO.Windows.FILE_SHARE_WRITE,
|
||||||
|
NativeIO.Windows.CREATE_NEW);
|
||||||
|
NativeIO.POSIX.chmod(f.getCanonicalPath(), permissions);
|
||||||
|
return new FileOutputStream(fd);
|
||||||
|
} catch (NativeIOException nioe) {
|
||||||
|
if (nioe.getErrorCode() == 80) {
|
||||||
|
// ERROR_FILE_EXISTS
|
||||||
|
// 80 (0x50)
|
||||||
|
// The file exists
|
||||||
|
throw new AlreadyExistsException(nioe);
|
||||||
|
}
|
||||||
|
throw nioe;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private synchronized static void ensureInitialized() {
|
||||||
|
if (!initialized) {
|
||||||
|
cacheTimeout =
|
||||||
|
new Configuration().getLong("hadoop.security.uid.cache.secs",
|
||||||
|
4*60*60) * 1000;
|
||||||
|
LOG.info("Initialized cache for UID to User mapping with a cache" +
|
||||||
|
" timeout of " + cacheTimeout/1000 + " seconds.");
|
||||||
|
initialized = true;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -18,20 +18,40 @@
|
||||||
package org.apache.hadoop.io.nativeio;
|
package org.apache.hadoop.io.nativeio;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An exception generated by a call to the native IO code.
|
* An exception generated by a call to the native IO code.
|
||||||
*
|
*
|
||||||
* These exceptions simply wrap <i>errno</i> result codes.
|
* These exceptions simply wrap <i>errno</i> result codes on Linux,
|
||||||
|
* or the System Error Code on Windows.
|
||||||
*/
|
*/
|
||||||
public class NativeIOException extends IOException {
|
public class NativeIOException extends IOException {
|
||||||
private static final long serialVersionUID = 1L;
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
private Errno errno;
|
private Errno errno;
|
||||||
|
|
||||||
|
// Java has no unsigned primitive error code. Use a signed 32-bit
|
||||||
|
// integer to hold the unsigned 32-bit integer.
|
||||||
|
private int errorCode;
|
||||||
|
|
||||||
public NativeIOException(String msg, Errno errno) {
|
public NativeIOException(String msg, Errno errno) {
|
||||||
super(msg);
|
super(msg);
|
||||||
this.errno = errno;
|
this.errno = errno;
|
||||||
|
// Windows error code is always set to ERROR_SUCCESS on Linux,
|
||||||
|
// i.e. no failure on Windows
|
||||||
|
this.errorCode = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public NativeIOException(String msg, int errorCode) {
|
||||||
|
super(msg);
|
||||||
|
this.errorCode = errorCode;
|
||||||
|
this.errno = Errno.UNKNOWN;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getErrorCode() {
|
||||||
|
return errorCode;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Errno getErrno() {
|
public Errno getErrno() {
|
||||||
|
@ -40,8 +60,10 @@ public Errno getErrno() {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String toString() {
|
public String toString() {
|
||||||
|
if (Shell.WINDOWS)
|
||||||
|
return errorCode + ": " + super.getMessage();
|
||||||
|
else
|
||||||
return errno.toString() + ": " + super.getMessage();
|
return errno.toString() + ": " + super.getMessage();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -138,10 +138,12 @@ public void doGet(HttpServletRequest request, HttpServletResponse response)
|
||||||
*/
|
*/
|
||||||
void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> map) {
|
void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> map) {
|
||||||
for (Map.Entry<String, Map<String, List<TagsMetricsPair>>> context : map.entrySet()) {
|
for (Map.Entry<String, Map<String, List<TagsMetricsPair>>> context : map.entrySet()) {
|
||||||
out.println(context.getKey());
|
out.print(context.getKey());
|
||||||
|
out.print("\n");
|
||||||
for (Map.Entry<String, List<TagsMetricsPair>> record : context.getValue().entrySet()) {
|
for (Map.Entry<String, List<TagsMetricsPair>> record : context.getValue().entrySet()) {
|
||||||
indent(out, 1);
|
indent(out, 1);
|
||||||
out.println(record.getKey());
|
out.print(record.getKey());
|
||||||
|
out.print("\n");
|
||||||
for (TagsMetricsPair pair : record.getValue()) {
|
for (TagsMetricsPair pair : record.getValue()) {
|
||||||
indent(out, 2);
|
indent(out, 2);
|
||||||
// Prints tag values in the form "{key=value,key=value}:"
|
// Prints tag values in the form "{key=value,key=value}:"
|
||||||
|
@ -157,7 +159,7 @@ void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> m
|
||||||
out.print("=");
|
out.print("=");
|
||||||
out.print(tagValue.getValue().toString());
|
out.print(tagValue.getValue().toString());
|
||||||
}
|
}
|
||||||
out.println("}:");
|
out.print("}:\n");
|
||||||
|
|
||||||
// Now print metric values, one per line
|
// Now print metric values, one per line
|
||||||
for (Map.Entry<String, Number> metricValue :
|
for (Map.Entry<String, Number> metricValue :
|
||||||
|
@ -165,7 +167,8 @@ void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> m
|
||||||
indent(out, 3);
|
indent(out, 3);
|
||||||
out.print(metricValue.getKey());
|
out.print(metricValue.getKey());
|
||||||
out.print("=");
|
out.print("=");
|
||||||
out.println(metricValue.getValue().toString());
|
out.print(metricValue.getValue().toString());
|
||||||
|
out.print("\n");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -86,7 +86,8 @@ private static List<String> getUnixGroups(final String user) throws IOException
|
||||||
LOG.warn("got exception trying to get groups for user " + user, e);
|
LOG.warn("got exception trying to get groups for user " + user, e);
|
||||||
}
|
}
|
||||||
|
|
||||||
StringTokenizer tokenizer = new StringTokenizer(result);
|
StringTokenizer tokenizer =
|
||||||
|
new StringTokenizer(result, Shell.TOKEN_SEPARATOR_REGEX);
|
||||||
List<String> groups = new LinkedList<String>();
|
List<String> groups = new LinkedList<String>();
|
||||||
while (tokenizer.hasMoreTokens()) {
|
while (tokenizer.hasMoreTokens()) {
|
||||||
groups.add(tokenizer.nextToken());
|
groups.add(tokenizer.nextToken());
|
||||||
|
|
|
@ -21,6 +21,7 @@
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStreamReader;
|
import java.io.InputStreamReader;
|
||||||
|
import java.util.Arrays;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Timer;
|
import java.util.Timer;
|
||||||
import java.util.TimerTask;
|
import java.util.TimerTask;
|
||||||
|
@ -44,46 +45,208 @@ abstract public class Shell {
|
||||||
|
|
||||||
public static final Log LOG = LogFactory.getLog(Shell.class);
|
public static final Log LOG = LogFactory.getLog(Shell.class);
|
||||||
|
|
||||||
|
private static boolean IS_JAVA7_OR_ABOVE =
|
||||||
|
System.getProperty("java.version").substring(0, 3).compareTo("1.7") >= 0;
|
||||||
|
|
||||||
|
public static boolean isJava7OrAbove() {
|
||||||
|
return IS_JAVA7_OR_ABOVE;
|
||||||
|
}
|
||||||
|
|
||||||
/** a Unix command to get the current user's name */
|
/** a Unix command to get the current user's name */
|
||||||
public final static String USER_NAME_COMMAND = "whoami";
|
public final static String USER_NAME_COMMAND = "whoami";
|
||||||
|
|
||||||
|
/** Windows CreateProcess synchronization object */
|
||||||
|
public static final Object WindowsProcessLaunchLock = new Object();
|
||||||
|
|
||||||
/** a Unix command to get the current user's groups list */
|
/** a Unix command to get the current user's groups list */
|
||||||
public static String[] getGroupsCommand() {
|
public static String[] getGroupsCommand() {
|
||||||
return new String[]{"bash", "-c", "groups"};
|
return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
|
||||||
|
: new String[]{"bash", "-c", "groups"};
|
||||||
}
|
}
|
||||||
|
|
||||||
/** a Unix command to get a given user's groups list */
|
/** a Unix command to get a given user's groups list */
|
||||||
public static String[] getGroupsForUserCommand(final String user) {
|
public static String[] getGroupsForUserCommand(final String user) {
|
||||||
//'groups username' command return is non-consistent across different unixes
|
//'groups username' command return is non-consistent across different unixes
|
||||||
return new String [] {"bash", "-c", "id -Gn " + user};
|
return (WINDOWS)? new String[] { WINUTILS, "groups", "-F", "\"" + user + "\""}
|
||||||
|
: new String [] {"bash", "-c", "id -Gn " + user};
|
||||||
}
|
}
|
||||||
|
|
||||||
/** a Unix command to get a given netgroup's user list */
|
/** a Unix command to get a given netgroup's user list */
|
||||||
public static String[] getUsersForNetgroupCommand(final String netgroup) {
|
public static String[] getUsersForNetgroupCommand(final String netgroup) {
|
||||||
//'groups username' command return is non-consistent across different unixes
|
//'groups username' command return is non-consistent across different unixes
|
||||||
return new String [] {"bash", "-c", "getent netgroup " + netgroup};
|
return (WINDOWS)? new String [] {"cmd", "/c", "getent netgroup " + netgroup}
|
||||||
|
: new String [] {"bash", "-c", "getent netgroup " + netgroup};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Return a command to get permission information. */
|
||||||
|
public static String[] getGetPermissionCommand() {
|
||||||
|
return (WINDOWS) ? new String[] { WINUTILS, "ls", "-F" }
|
||||||
|
: new String[] { "/bin/ls", "-ld" };
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Return a command to set permission */
|
||||||
|
public static String[] getSetPermissionCommand(String perm, boolean recursive) {
|
||||||
|
if (recursive) {
|
||||||
|
return (WINDOWS) ? new String[] { WINUTILS, "chmod", "-R", perm }
|
||||||
|
: new String[] { "chmod", "-R", perm };
|
||||||
|
} else {
|
||||||
|
return (WINDOWS) ? new String[] { WINUTILS, "chmod", perm }
|
||||||
|
: new String[] { "chmod", perm };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a command to set permission for specific file.
|
||||||
|
*
|
||||||
|
* @param perm String permission to set
|
||||||
|
* @param recursive boolean true to apply to all sub-directories recursively
|
||||||
|
* @param file String file to set
|
||||||
|
* @return String[] containing command and arguments
|
||||||
|
*/
|
||||||
|
public static String[] getSetPermissionCommand(String perm, boolean recursive,
|
||||||
|
String file) {
|
||||||
|
String[] baseCmd = getSetPermissionCommand(perm, recursive);
|
||||||
|
String[] cmdWithFile = Arrays.copyOf(baseCmd, baseCmd.length + 1);
|
||||||
|
cmdWithFile[cmdWithFile.length - 1] = file;
|
||||||
|
return cmdWithFile;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Return a command to set owner */
|
||||||
|
public static String[] getSetOwnerCommand(String owner) {
|
||||||
|
return (WINDOWS) ? new String[] { WINUTILS, "chown", "\"" + owner + "\"" }
|
||||||
|
: new String[] { "chown", owner };
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Return a command to create symbolic links */
|
||||||
|
public static String[] getSymlinkCommand(String target, String link) {
|
||||||
|
return WINDOWS ? new String[] { WINUTILS, "symlink", link, target }
|
||||||
|
: new String[] { "ln", "-s", target, link };
|
||||||
|
}
|
||||||
|
|
||||||
/** a Unix command to set permission */
|
/** a Unix command to set permission */
|
||||||
public static final String SET_PERMISSION_COMMAND = "chmod";
|
public static final String SET_PERMISSION_COMMAND = "chmod";
|
||||||
/** a Unix command to set owner */
|
/** a Unix command to set owner */
|
||||||
public static final String SET_OWNER_COMMAND = "chown";
|
public static final String SET_OWNER_COMMAND = "chown";
|
||||||
|
|
||||||
|
/** a Unix command to set the change user's groups list */
|
||||||
public static final String SET_GROUP_COMMAND = "chgrp";
|
public static final String SET_GROUP_COMMAND = "chgrp";
|
||||||
/** a Unix command to create a link */
|
/** a Unix command to create a link */
|
||||||
public static final String LINK_COMMAND = "ln";
|
public static final String LINK_COMMAND = "ln";
|
||||||
/** a Unix command to get a link target */
|
/** a Unix command to get a link target */
|
||||||
public static final String READ_LINK_COMMAND = "readlink";
|
public static final String READ_LINK_COMMAND = "readlink";
|
||||||
/** Return a Unix command to get permission information. */
|
|
||||||
public static String[] getGET_PERMISSION_COMMAND() {
|
|
||||||
//force /bin/ls, except on windows.
|
|
||||||
return new String[] {(WINDOWS ? "ls" : "/bin/ls"), "-ld"};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**Time after which the executing script would be timedout*/
|
/**Time after which the executing script would be timedout*/
|
||||||
protected long timeOutInterval = 0L;
|
protected long timeOutInterval = 0L;
|
||||||
/** If or not script timed out*/
|
/** If or not script timed out*/
|
||||||
private AtomicBoolean timedOut;
|
private AtomicBoolean timedOut;
|
||||||
|
|
||||||
|
|
||||||
|
/** Centralized logic to discover and validate the sanity of the Hadoop
|
||||||
|
* home directory. Returns either NULL or a directory that exists and
|
||||||
|
* was specified via either -Dhadoop.home.dir or the HADOOP_HOME ENV
|
||||||
|
* variable. This does a lot of work so it should only be called
|
||||||
|
* privately for initialization once per process.
|
||||||
|
**/
|
||||||
|
private static String checkHadoopHome() {
|
||||||
|
|
||||||
|
// first check the Dflag hadoop.home.dir with JVM scope
|
||||||
|
String home = System.getProperty("hadoop.home.dir");
|
||||||
|
|
||||||
|
// fall back to the system/user-global env variable
|
||||||
|
if (home == null) {
|
||||||
|
home = System.getenv("HADOOP_HOME");
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// couldn't find either setting for hadoop's home directory
|
||||||
|
if (home == null) {
|
||||||
|
throw new IOException("HADOOP_HOME or hadoop.home.dir are not set.");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (home.startsWith("\"") && home.endsWith("\"")) {
|
||||||
|
home = home.substring(1, home.length()-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// check that the home setting is actually a directory that exists
|
||||||
|
File homedir = new File(home);
|
||||||
|
if (!homedir.isAbsolute() || !homedir.exists() || !homedir.isDirectory()) {
|
||||||
|
throw new IOException("Hadoop home directory " + homedir
|
||||||
|
+ " does not exist, is not a directory, or is not an absolute path.");
|
||||||
|
}
|
||||||
|
|
||||||
|
home = homedir.getCanonicalPath();
|
||||||
|
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
LOG.error("Failed to detect a valid hadoop home directory", ioe);
|
||||||
|
home = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return home;
|
||||||
|
}
|
||||||
|
private static String HADOOP_HOME_DIR = checkHadoopHome();
|
||||||
|
|
||||||
|
// Public getter, throws an exception if HADOOP_HOME failed validation
|
||||||
|
// checks and is being referenced downstream.
|
||||||
|
public static final String getHadoopHome() throws IOException {
|
||||||
|
if (HADOOP_HOME_DIR == null) {
|
||||||
|
throw new IOException("Misconfigured HADOOP_HOME cannot be referenced.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return HADOOP_HOME_DIR;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** fully qualify the path to a binary that should be in a known hadoop
|
||||||
|
* bin location. This is primarily useful for disambiguating call-outs
|
||||||
|
* to executable sub-components of Hadoop to avoid clashes with other
|
||||||
|
* executables that may be in the path. Caveat: this call doesn't
|
||||||
|
* just format the path to the bin directory. It also checks for file
|
||||||
|
* existence of the composed path. The output of this call should be
|
||||||
|
* cached by callers.
|
||||||
|
* */
|
||||||
|
public static final String getQualifiedBinPath(String executable)
|
||||||
|
throws IOException {
|
||||||
|
// construct hadoop bin path to the specified executable
|
||||||
|
String fullExeName = HADOOP_HOME_DIR + File.separator + "bin"
|
||||||
|
+ File.separator + executable;
|
||||||
|
|
||||||
|
File exeFile = new File(fullExeName);
|
||||||
|
if (!exeFile.exists()) {
|
||||||
|
throw new IOException("Could not locate executable " + fullExeName
|
||||||
|
+ " in the Hadoop binaries.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return exeFile.getCanonicalPath();
|
||||||
|
}
|
||||||
|
|
||||||
/** Set to true on Windows platforms */
|
/** Set to true on Windows platforms */
|
||||||
public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
|
public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
|
||||||
= System.getProperty("os.name").startsWith("Windows");
|
= System.getProperty("os.name").startsWith("Windows");
|
||||||
|
|
||||||
|
public static final boolean LINUX
|
||||||
|
= System.getProperty("os.name").startsWith("Linux");
|
||||||
|
|
||||||
|
/** a Windows utility to emulate Unix commands */
|
||||||
|
public static final String WINUTILS = getWinUtilsPath();
|
||||||
|
|
||||||
|
public static final String getWinUtilsPath() {
|
||||||
|
String winUtilsPath = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (WINDOWS) {
|
||||||
|
winUtilsPath = getQualifiedBinPath("winutils.exe");
|
||||||
|
}
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
LOG.error("Failed to locate the winutils binary in the hadoop binary path",
|
||||||
|
ioe);
|
||||||
|
}
|
||||||
|
|
||||||
|
return winUtilsPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Token separator regex used to parse Shell tool outputs */
|
||||||
|
public static final String TOKEN_SEPARATOR_REGEX
|
||||||
|
= WINDOWS ? "[|\n\r]" : "[ \t\n\r\f]";
|
||||||
|
|
||||||
private long interval; // refresh interval in msec
|
private long interval; // refresh interval in msec
|
||||||
private long lastTime; // last time the command was performed
|
private long lastTime; // last time the command was performed
|
||||||
private Map<String, String> environment; // env for the command execution
|
private Map<String, String> environment; // env for the command execution
|
||||||
|
@ -144,7 +307,19 @@ private void runCommand() throws IOException {
|
||||||
builder.directory(this.dir);
|
builder.directory(this.dir);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (Shell.WINDOWS) {
|
||||||
|
synchronized (WindowsProcessLaunchLock) {
|
||||||
|
// To workaround the race condition issue with child processes
|
||||||
|
// inheriting unintended handles during process launch that can
|
||||||
|
// lead to hangs on reading output and error streams, we
|
||||||
|
// serialize process creation. More info available at:
|
||||||
|
// http://support.microsoft.com/kb/315939
|
||||||
process = builder.start();
|
process = builder.start();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
process = builder.start();
|
||||||
|
}
|
||||||
|
|
||||||
if (timeOutInterval > 0) {
|
if (timeOutInterval > 0) {
|
||||||
timeOutTimer = new Timer("Shell command timeout");
|
timeOutTimer = new Timer("Shell command timeout");
|
||||||
timeoutTimerTask = new ShellTimeoutTimerTask(
|
timeoutTimerTask = new ShellTimeoutTimerTask(
|
||||||
|
|
|
@ -30,13 +30,17 @@
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.StringTokenizer;
|
import java.util.StringTokenizer;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import org.apache.commons.lang.SystemUtils;
|
import org.apache.commons.lang.SystemUtils;
|
||||||
import org.apache.hadoop.classification.InterfaceAudience;
|
import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
import org.apache.hadoop.classification.InterfaceStability;
|
import org.apache.hadoop.classification.InterfaceStability;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.net.NetUtils;
|
import org.apache.hadoop.net.NetUtils;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
import com.google.common.net.InetAddresses;
|
import com.google.common.net.InetAddresses;
|
||||||
|
|
||||||
|
@ -52,6 +56,27 @@ public class StringUtils {
|
||||||
*/
|
*/
|
||||||
public static final int SHUTDOWN_HOOK_PRIORITY = 0;
|
public static final int SHUTDOWN_HOOK_PRIORITY = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shell environment variables: $ followed by one letter or _ followed by
|
||||||
|
* multiple letters, numbers, or underscores. The group captures the
|
||||||
|
* environment variable name without the leading $.
|
||||||
|
*/
|
||||||
|
public static final Pattern SHELL_ENV_VAR_PATTERN =
|
||||||
|
Pattern.compile("\\$([A-Za-z_]{1}[A-Za-z0-9_]*)");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Windows environment variables: surrounded by %. The group captures the
|
||||||
|
* environment variable name without the leading and trailing %.
|
||||||
|
*/
|
||||||
|
public static final Pattern WIN_ENV_VAR_PATTERN = Pattern.compile("%(.*?)%");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Regular expression that matches and captures environment variable names
|
||||||
|
* according to platform-specific rules.
|
||||||
|
*/
|
||||||
|
public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
|
||||||
|
WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Make a string representation of the exception.
|
* Make a string representation of the exception.
|
||||||
* @param e The exception to stringify
|
* @param e The exception to stringify
|
||||||
|
@ -799,6 +824,28 @@ public static String join(CharSequence separator, Iterable<?> strings) {
|
||||||
return sb.toString();
|
return sb.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Concatenates strings, using a separator.
|
||||||
|
*
|
||||||
|
* @param separator to join with
|
||||||
|
* @param strings to join
|
||||||
|
* @return the joined string
|
||||||
|
*/
|
||||||
|
public static String join(CharSequence separator, String[] strings) {
|
||||||
|
// Ideally we don't have to duplicate the code here if array is iterable.
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
boolean first = true;
|
||||||
|
for (String s : strings) {
|
||||||
|
if (first) {
|
||||||
|
first = false;
|
||||||
|
} else {
|
||||||
|
sb.append(separator);
|
||||||
|
}
|
||||||
|
sb.append(s);
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert SOME_STUFF to SomeStuff
|
* Convert SOME_STUFF to SomeStuff
|
||||||
*
|
*
|
||||||
|
@ -814,4 +861,37 @@ public static String camelize(String s) {
|
||||||
|
|
||||||
return sb.toString();
|
return sb.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matches a template string against a pattern, replaces matched tokens with
|
||||||
|
* the supplied replacements, and returns the result. The regular expression
|
||||||
|
* must use a capturing group. The value of the first capturing group is used
|
||||||
|
* to look up the replacement. If no replacement is found for the token, then
|
||||||
|
* it is replaced with the empty string.
|
||||||
|
*
|
||||||
|
* For example, assume template is "%foo%_%bar%_%baz%", pattern is "%(.*?)%",
|
||||||
|
* and replacements contains 2 entries, mapping "foo" to "zoo" and "baz" to
|
||||||
|
* "zaz". The result returned would be "zoo__zaz".
|
||||||
|
*
|
||||||
|
* @param template String template to receive replacements
|
||||||
|
* @param pattern Pattern to match for identifying tokens, must use a capturing
|
||||||
|
* group
|
||||||
|
* @param replacements Map<String, String> mapping tokens identified by the
|
||||||
|
* capturing group to their replacement values
|
||||||
|
* @return String template with replacements
|
||||||
|
*/
|
||||||
|
public static String replaceTokens(String template, Pattern pattern,
|
||||||
|
Map<String, String> replacements) {
|
||||||
|
StringBuffer sb = new StringBuffer();
|
||||||
|
Matcher matcher = pattern.matcher(template);
|
||||||
|
while (matcher.find()) {
|
||||||
|
String replacement = replacements.get(matcher.group(1));
|
||||||
|
if (replacement == null) {
|
||||||
|
replacement = "";
|
||||||
|
}
|
||||||
|
matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement));
|
||||||
|
}
|
||||||
|
matcher.appendTail(sb);
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -60,9 +60,7 @@ <h3>Platforms</h3>
|
||||||
Hadoop was been demonstrated on GNU/Linux clusters with 2000 nodes.
|
Hadoop was been demonstrated on GNU/Linux clusters with 2000 nodes.
|
||||||
</li>
|
</li>
|
||||||
<li>
|
<li>
|
||||||
Win32 is supported as a <i>development</i> platform. Distributed operation
|
Windows is also a supported platform.
|
||||||
has not been well tested on Win32, so this is not a <i>production</i>
|
|
||||||
platform.
|
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
|
@ -84,15 +82,6 @@ <h3>Requisite Software</h3>
|
||||||
</li>
|
</li>
|
||||||
</ol>
|
</ol>
|
||||||
|
|
||||||
<h4>Additional requirements for Windows</h4>
|
|
||||||
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
<a href="http://www.cygwin.com/">Cygwin</a> - Required for shell support in
|
|
||||||
addition to the required software above.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
|
|
||||||
<h3>Installing Required Software</h3>
|
<h3>Installing Required Software</h3>
|
||||||
|
|
||||||
<p>If your platform does not have the required software listed above, you
|
<p>If your platform does not have the required software listed above, you
|
||||||
|
@ -104,13 +93,6 @@ <h3>Installing Required Software</h3>
|
||||||
$ sudo apt-get install rsync<br>
|
$ sudo apt-get install rsync<br>
|
||||||
</pre></blockquote></p>
|
</pre></blockquote></p>
|
||||||
|
|
||||||
<p>On Windows, if you did not install the required software when you
|
|
||||||
installed cygwin, start the cygwin installer and select the packages:</p>
|
|
||||||
<ul>
|
|
||||||
<li>openssh - the "Net" category</li>
|
|
||||||
<li>rsync - the "Net" category</li>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
<h2>Getting Started</h2>
|
<h2>Getting Started</h2>
|
||||||
|
|
||||||
<p>First, you need to get a copy of the Hadoop code.</p>
|
<p>First, you need to get a copy of the Hadoop code.</p>
|
||||||
|
|
|
@ -0,0 +1,48 @@
|
||||||
|
|
||||||
|
Microsoft Visual Studio Solution File, Format Version 11.00
|
||||||
|
# Visual Studio 2010
|
||||||
|
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "native", "native.vcxproj", "{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}"
|
||||||
|
EndProject
|
||||||
|
Global
|
||||||
|
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||||
|
Debug|Mixed Platforms = Debug|Mixed Platforms
|
||||||
|
Debug|Win32 = Debug|Win32
|
||||||
|
Debug|x64 = Debug|x64
|
||||||
|
Release|Mixed Platforms = Release|Mixed Platforms
|
||||||
|
Release|Win32 = Release|Win32
|
||||||
|
Release|x64 = Release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.Build.0 = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.Build.0 = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = Release|x64
|
||||||
|
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = Release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(SolutionProperties) = preSolution
|
||||||
|
HideSolutionNode = FALSE
|
||||||
|
EndGlobalSection
|
||||||
|
EndGlobal
|
|
@ -0,0 +1,96 @@
|
||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
contributor license agreements. See the NOTICE file distributed with
|
||||||
|
this work for additional information regarding copyright ownership.
|
||||||
|
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
(the "License"); you may not use this file except in compliance with
|
||||||
|
the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||||
|
<ItemGroup Label="ProjectConfigurations">
|
||||||
|
<ProjectConfiguration Include="Release|x64">
|
||||||
|
<Configuration>Release</Configuration>
|
||||||
|
<Platform>x64</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
</ItemGroup>
|
||||||
|
<PropertyGroup Label="Globals">
|
||||||
|
<ProjectGuid>{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}</ProjectGuid>
|
||||||
|
<Keyword>Win32Proj</Keyword>
|
||||||
|
<RootNamespace>native</RootNamespace>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||||
|
<ConfigurationType>DynamicLibrary</ConfigurationType>
|
||||||
|
<UseDebugLibraries>false</UseDebugLibraries>
|
||||||
|
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||||
|
<ImportGroup Label="ExtensionSettings">
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<PropertyGroup Label="UserMacros" />
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<LinkIncremental>false</LinkIncremental>
|
||||||
|
<OutDir>..\..\..\target\bin\</OutDir>
|
||||||
|
<IntDir>..\..\..\target\native\$(Configuration)\</IntDir>
|
||||||
|
<TargetName>hadoop</TargetName>
|
||||||
|
</PropertyGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<ClCompile>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<PrecompiledHeader>NotUsing</PrecompiledHeader>
|
||||||
|
<Optimization>MaxSpeed</Optimization>
|
||||||
|
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||||
|
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||||
|
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;NATIVE_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
<AdditionalIncludeDirectories>..\winutils\include;..\..\..\target\native\javah;%JAVA_HOME%\include;%JAVA_HOME%\include\win32;.\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
||||||
|
<CompileAs>CompileAsC</CompileAs>
|
||||||
|
<DisableSpecificWarnings>4244</DisableSpecificWarnings>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Windows</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||||
|
<OptimizeReferences>true</OptimizeReferences>
|
||||||
|
<AdditionalDependencies>Ws2_32.lib;libwinutils.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||||
|
<AdditionalLibraryDirectories>..\..\..\target\bin;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c" />
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c" />
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h" />
|
||||||
|
<ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h" />
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h" />
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h" />
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
|
||||||
|
<ClInclude Include="src\org_apache_hadoop.h" />
|
||||||
|
</ItemGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||||
|
<ImportGroup Label="ExtensionTargets">
|
||||||
|
</ImportGroup>
|
||||||
|
</Project>
|
|
@ -0,0 +1,87 @@
|
||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
contributor license agreements. See the NOTICE file distributed with
|
||||||
|
this work for additional information regarding copyright ownership.
|
||||||
|
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
(the "License"); you may not use this file except in compliance with
|
||||||
|
the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||||
|
<ItemGroup>
|
||||||
|
<Filter Include="Source Files">
|
||||||
|
<UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
|
||||||
|
<Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
|
||||||
|
</Filter>
|
||||||
|
<Filter Include="Header Files">
|
||||||
|
<UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
|
||||||
|
<Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
|
||||||
|
</Filter>
|
||||||
|
<Filter Include="Resource Files">
|
||||||
|
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
|
||||||
|
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
|
||||||
|
</Filter>
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
<ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClCompile>
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h">
|
||||||
|
<Filter>Source Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h">
|
||||||
|
<Filter>Header Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
|
||||||
|
<Filter>Header Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h">
|
||||||
|
<Filter>Header Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
<ClInclude Include="src\org_apache_hadoop.h">
|
||||||
|
<Filter>Header Files</Filter>
|
||||||
|
</ClInclude>
|
||||||
|
</ItemGroup>
|
||||||
|
</Project>
|
|
@ -16,10 +16,14 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include "config.h"
|
|
||||||
#include "org_apache_hadoop.h"
|
#include "org_apache_hadoop.h"
|
||||||
#include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"
|
#include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include "config.h"
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
//****************************
|
//****************************
|
||||||
// Simple Functions
|
// Simple Functions
|
||||||
//****************************
|
//****************************
|
||||||
|
@ -61,6 +65,9 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_init
|
||||||
|
|
||||||
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_compressBytesDirect
|
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_compressBytesDirect
|
||||||
(JNIEnv *env, jobject thisj){
|
(JNIEnv *env, jobject thisj){
|
||||||
|
const char* uncompressed_bytes;
|
||||||
|
char *compressed_bytes;
|
||||||
|
|
||||||
// Get members of Lz4Compressor
|
// Get members of Lz4Compressor
|
||||||
jobject clazz = (*env)->GetStaticObjectField(env, thisj, Lz4Compressor_clazz);
|
jobject clazz = (*env)->GetStaticObjectField(env, thisj, Lz4Compressor_clazz);
|
||||||
jobject uncompressed_direct_buf = (*env)->GetObjectField(env, thisj, Lz4Compressor_uncompressedDirectBuf);
|
jobject uncompressed_direct_buf = (*env)->GetObjectField(env, thisj, Lz4Compressor_uncompressedDirectBuf);
|
||||||
|
@ -70,7 +77,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_comp
|
||||||
|
|
||||||
// Get the input direct buffer
|
// Get the input direct buffer
|
||||||
LOCK_CLASS(env, clazz, "Lz4Compressor");
|
LOCK_CLASS(env, clazz, "Lz4Compressor");
|
||||||
const char* uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
|
uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
|
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
|
||||||
|
|
||||||
if (uncompressed_bytes == 0) {
|
if (uncompressed_bytes == 0) {
|
||||||
|
@ -79,7 +86,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_comp
|
||||||
|
|
||||||
// Get the output direct buffer
|
// Get the output direct buffer
|
||||||
LOCK_CLASS(env, clazz, "Lz4Compressor");
|
LOCK_CLASS(env, clazz, "Lz4Compressor");
|
||||||
char* compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
|
compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
|
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
|
||||||
|
|
||||||
if (compressed_bytes == 0) {
|
if (compressed_bytes == 0) {
|
||||||
|
|
|
@ -16,10 +16,13 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include "config.h"
|
|
||||||
#include "org_apache_hadoop.h"
|
#include "org_apache_hadoop.h"
|
||||||
#include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
|
#include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include "config.h"
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
|
int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -58,6 +61,9 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_in
|
||||||
|
|
||||||
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_decompressBytesDirect
|
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_decompressBytesDirect
|
||||||
(JNIEnv *env, jobject thisj){
|
(JNIEnv *env, jobject thisj){
|
||||||
|
const char *compressed_bytes;
|
||||||
|
char *uncompressed_bytes;
|
||||||
|
|
||||||
// Get members of Lz4Decompressor
|
// Get members of Lz4Decompressor
|
||||||
jobject clazz = (*env)->GetStaticObjectField(env,thisj, Lz4Decompressor_clazz);
|
jobject clazz = (*env)->GetStaticObjectField(env,thisj, Lz4Decompressor_clazz);
|
||||||
jobject compressed_direct_buf = (*env)->GetObjectField(env,thisj, Lz4Decompressor_compressedDirectBuf);
|
jobject compressed_direct_buf = (*env)->GetObjectField(env,thisj, Lz4Decompressor_compressedDirectBuf);
|
||||||
|
@ -67,7 +73,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_de
|
||||||
|
|
||||||
// Get the input direct buffer
|
// Get the input direct buffer
|
||||||
LOCK_CLASS(env, clazz, "Lz4Decompressor");
|
LOCK_CLASS(env, clazz, "Lz4Decompressor");
|
||||||
const char* compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
|
compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
|
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
|
||||||
|
|
||||||
if (compressed_bytes == 0) {
|
if (compressed_bytes == 0) {
|
||||||
|
@ -76,7 +82,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_de
|
||||||
|
|
||||||
// Get the output direct buffer
|
// Get the output direct buffer
|
||||||
LOCK_CLASS(env, clazz, "Lz4Decompressor");
|
LOCK_CLASS(env, clazz, "Lz4Decompressor");
|
||||||
char* uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
|
uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
|
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
|
||||||
|
|
||||||
if (uncompressed_bytes == 0) {
|
if (uncompressed_bytes == 0) {
|
||||||
|
|
|
@ -16,12 +16,18 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <dlfcn.h>
|
|
||||||
|
#if defined HADOOP_SNAPPY_LIBRARY
|
||||||
|
|
||||||
#include <stdio.h>
|
#include <stdio.h>
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <dlfcn.h>
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
#include "org_apache_hadoop_io_compress_snappy.h"
|
#include "org_apache_hadoop_io_compress_snappy.h"
|
||||||
#include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
|
#include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
|
||||||
|
|
||||||
|
@ -81,7 +87,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
|
||||||
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
|
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
|
||||||
|
|
||||||
if (uncompressed_bytes == 0) {
|
if (uncompressed_bytes == 0) {
|
||||||
return 0;
|
return (jint)0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the output direct buffer
|
// Get the output direct buffer
|
||||||
|
@ -90,7 +96,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
|
||||||
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
|
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
|
||||||
|
|
||||||
if (compressed_bytes == 0) {
|
if (compressed_bytes == 0) {
|
||||||
return 0;
|
return (jint)0;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* size_t should always be 4 bytes or larger. */
|
/* size_t should always be 4 bytes or larger. */
|
||||||
|
@ -109,3 +115,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
|
||||||
(*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
|
(*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
|
||||||
return (jint)buf_len;
|
return (jint)buf_len;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#endif //define HADOOP_SNAPPY_LIBRARY
|
||||||
|
|
|
@ -16,12 +16,18 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <dlfcn.h>
|
|
||||||
|
#if defined HADOOP_SNAPPY_LIBRARY
|
||||||
|
|
||||||
#include <stdio.h>
|
#include <stdio.h>
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
|
#include <dlfcn.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
#include "org_apache_hadoop_io_compress_snappy.h"
|
#include "org_apache_hadoop_io_compress_snappy.h"
|
||||||
#include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
|
#include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
|
||||||
|
|
||||||
|
@ -103,3 +109,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres
|
||||||
|
|
||||||
return (jint)uncompressed_direct_buf_len;
|
return (jint)uncompressed_direct_buf_len;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#endif //define HADOOP_SNAPPY_LIBRARY
|
||||||
|
|
|
@ -16,12 +16,15 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <dlfcn.h>
|
|
||||||
#include <stdio.h>
|
#include <stdio.h>
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <dlfcn.h>
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
|
#endif
|
||||||
|
|
||||||
#include "org_apache_hadoop_io_compress_zlib.h"
|
#include "org_apache_hadoop_io_compress_zlib.h"
|
||||||
#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
|
#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
|
||||||
|
|
||||||
|
@ -35,23 +38,90 @@ static jfieldID ZlibCompressor_directBufferSize;
|
||||||
static jfieldID ZlibCompressor_finish;
|
static jfieldID ZlibCompressor_finish;
|
||||||
static jfieldID ZlibCompressor_finished;
|
static jfieldID ZlibCompressor_finished;
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
|
static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
|
||||||
static int (*dlsym_deflate)(z_streamp, int);
|
static int (*dlsym_deflate)(z_streamp, int);
|
||||||
static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
|
static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
|
||||||
static int (*dlsym_deflateReset)(z_streamp);
|
static int (*dlsym_deflateReset)(z_streamp);
|
||||||
static int (*dlsym_deflateEnd)(z_streamp);
|
static int (*dlsym_deflateEnd)(z_streamp);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
#include <Strsafe.h>
|
||||||
|
typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
|
||||||
|
typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
|
||||||
|
typedef int (__cdecl *__dlsym_deflateSetDictionary) (z_streamp, const Bytef *, uInt);
|
||||||
|
typedef int (__cdecl *__dlsym_deflateReset) (z_streamp);
|
||||||
|
typedef int (__cdecl *__dlsym_deflateEnd) (z_streamp);
|
||||||
|
static __dlsym_deflateInit2_ dlsym_deflateInit2_;
|
||||||
|
static __dlsym_deflate dlsym_deflate;
|
||||||
|
static __dlsym_deflateSetDictionary dlsym_deflateSetDictionary;
|
||||||
|
static __dlsym_deflateReset dlsym_deflateReset;
|
||||||
|
static __dlsym_deflateEnd dlsym_deflateEnd;
|
||||||
|
|
||||||
|
// Try to load zlib.dll from the dir where hadoop.dll is located.
|
||||||
|
HANDLE LoadZlibTryHadoopNativeDir() {
|
||||||
|
HMODULE libz = NULL;
|
||||||
|
PCWSTR HADOOP_DLL = L"hadoop.dll";
|
||||||
|
size_t HADOOP_DLL_LEN = 10;
|
||||||
|
WCHAR path[MAX_PATH] = { 0 };
|
||||||
|
BOOL isPathValid = FALSE;
|
||||||
|
|
||||||
|
// Get hadoop.dll full path
|
||||||
|
HMODULE hModule = GetModuleHandle(HADOOP_DLL);
|
||||||
|
if (hModule != NULL) {
|
||||||
|
if (GetModuleFileName(hModule, path, MAX_PATH) > 0) {
|
||||||
|
size_t size = 0;
|
||||||
|
if (StringCchLength(path, MAX_PATH, &size) == S_OK) {
|
||||||
|
|
||||||
|
// Update path variable to have the full path to the zlib.dll
|
||||||
|
size = size - HADOOP_DLL_LEN;
|
||||||
|
if (size >= 0) {
|
||||||
|
path[size] = L'\0';
|
||||||
|
if (StringCchCat(path, MAX_PATH, HADOOP_ZLIB_LIBRARY) == S_OK) {
|
||||||
|
isPathValid = TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isPathValid) {
|
||||||
|
libz = LoadLibrary(path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// fallback to system paths
|
||||||
|
if (!libz) {
|
||||||
|
libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
|
||||||
|
}
|
||||||
|
|
||||||
|
return libz;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
|
||||||
JNIEnv *env, jclass class
|
JNIEnv *env, jclass class
|
||||||
) {
|
) {
|
||||||
|
#ifdef UNIX
|
||||||
// Load libz.so
|
// Load libz.so
|
||||||
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
|
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
|
||||||
if (!libz) {
|
if (!libz) {
|
||||||
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
|
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
HMODULE libz = LoadZlibTryHadoopNativeDir();
|
||||||
|
|
||||||
|
if (!libz) {
|
||||||
|
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
// Locate the requisite symbols from libz.so
|
// Locate the requisite symbols from libz.so
|
||||||
dlerror(); // Clear any existing error
|
dlerror(); // Clear any existing error
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
|
LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
|
||||||
|
@ -59,6 +129,15 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
|
LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
|
LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
|
LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_deflateInit2_, dlsym_deflateInit2_, env, libz, "deflateInit2_");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_deflate, dlsym_deflate, env, libz, "deflate");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_deflateSetDictionary, dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_deflateReset, dlsym_deflateReset, env, libz, "deflateReset");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_deflateEnd, dlsym_deflateEnd, env, libz, "deflateEnd");
|
||||||
|
#endif
|
||||||
|
|
||||||
// Initialize the requisite fieldIds
|
// Initialize the requisite fieldIds
|
||||||
ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
|
ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
|
||||||
|
@ -84,6 +163,8 @@ JNIEXPORT jlong JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
|
||||||
JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
|
JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
|
||||||
) {
|
) {
|
||||||
|
int rv = 0;
|
||||||
|
static const int memLevel = 8; // See zconf.h
|
||||||
// Create a z_stream
|
// Create a z_stream
|
||||||
z_stream *stream = malloc(sizeof(z_stream));
|
z_stream *stream = malloc(sizeof(z_stream));
|
||||||
if (!stream) {
|
if (!stream) {
|
||||||
|
@ -93,8 +174,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
|
||||||
memset((void*)stream, 0, sizeof(z_stream));
|
memset((void*)stream, 0, sizeof(z_stream));
|
||||||
|
|
||||||
// Initialize stream
|
// Initialize stream
|
||||||
static const int memLevel = 8; // See zconf.h
|
rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
|
||||||
int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
|
|
||||||
memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
|
memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
|
||||||
|
|
||||||
if (rv != Z_OK) {
|
if (rv != Z_OK) {
|
||||||
|
@ -129,11 +209,12 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_setDictionary(
|
||||||
JNIEnv *env, jclass class, jlong stream,
|
JNIEnv *env, jclass class, jlong stream,
|
||||||
jarray b, jint off, jint len
|
jarray b, jint off, jint len
|
||||||
) {
|
) {
|
||||||
|
int rv = 0;
|
||||||
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
|
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
|
||||||
if (!buf) {
|
if (!buf) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
|
rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
|
||||||
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
|
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
|
||||||
|
|
||||||
if (rv != Z_OK) {
|
if (rv != Z_OK) {
|
||||||
|
@ -157,6 +238,17 @@ JNIEXPORT jint JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
|
||||||
JNIEnv *env, jobject this
|
JNIEnv *env, jobject this
|
||||||
) {
|
) {
|
||||||
|
jobject clazz = NULL;
|
||||||
|
jobject uncompressed_direct_buf = NULL;
|
||||||
|
jint uncompressed_direct_buf_off = 0;
|
||||||
|
jint uncompressed_direct_buf_len = 0;
|
||||||
|
jobject compressed_direct_buf = NULL;
|
||||||
|
jint compressed_direct_buf_len = 0;
|
||||||
|
jboolean finish;
|
||||||
|
Bytef* uncompressed_bytes = NULL;
|
||||||
|
Bytef* compressed_bytes = NULL;
|
||||||
|
int rv = 0;
|
||||||
|
jint no_compressed_bytes = 0;
|
||||||
// Get members of ZlibCompressor
|
// Get members of ZlibCompressor
|
||||||
z_stream *stream = ZSTREAM(
|
z_stream *stream = ZSTREAM(
|
||||||
(*env)->GetLongField(env, this,
|
(*env)->GetLongField(env, this,
|
||||||
|
@ -168,25 +260,25 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get members of ZlibCompressor
|
// Get members of ZlibCompressor
|
||||||
jobject clazz = (*env)->GetStaticObjectField(env, this,
|
clazz = (*env)->GetStaticObjectField(env, this,
|
||||||
ZlibCompressor_clazz);
|
ZlibCompressor_clazz);
|
||||||
jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this,
|
uncompressed_direct_buf = (*env)->GetObjectField(env, this,
|
||||||
ZlibCompressor_uncompressedDirectBuf);
|
ZlibCompressor_uncompressedDirectBuf);
|
||||||
jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
|
uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
|
||||||
ZlibCompressor_uncompressedDirectBufOff);
|
ZlibCompressor_uncompressedDirectBufOff);
|
||||||
jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
|
uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
|
||||||
ZlibCompressor_uncompressedDirectBufLen);
|
ZlibCompressor_uncompressedDirectBufLen);
|
||||||
|
|
||||||
jobject compressed_direct_buf = (*env)->GetObjectField(env, this,
|
compressed_direct_buf = (*env)->GetObjectField(env, this,
|
||||||
ZlibCompressor_compressedDirectBuf);
|
ZlibCompressor_compressedDirectBuf);
|
||||||
jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
|
compressed_direct_buf_len = (*env)->GetIntField(env, this,
|
||||||
ZlibCompressor_directBufferSize);
|
ZlibCompressor_directBufferSize);
|
||||||
|
|
||||||
jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
|
finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
|
||||||
|
|
||||||
// Get the input direct buffer
|
// Get the input direct buffer
|
||||||
LOCK_CLASS(env, clazz, "ZlibCompressor");
|
LOCK_CLASS(env, clazz, "ZlibCompressor");
|
||||||
Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
|
uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
|
||||||
uncompressed_direct_buf);
|
uncompressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
|
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
|
||||||
|
|
||||||
|
@ -196,7 +288,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
|
||||||
|
|
||||||
// Get the output direct buffer
|
// Get the output direct buffer
|
||||||
LOCK_CLASS(env, clazz, "ZlibCompressor");
|
LOCK_CLASS(env, clazz, "ZlibCompressor");
|
||||||
Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env,
|
compressed_bytes = (*env)->GetDirectBufferAddress(env,
|
||||||
compressed_direct_buf);
|
compressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
|
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
|
||||||
|
|
||||||
|
@ -211,9 +303,8 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
|
||||||
stream->avail_out = compressed_direct_buf_len;
|
stream->avail_out = compressed_direct_buf_len;
|
||||||
|
|
||||||
// Compress
|
// Compress
|
||||||
int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
|
rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
|
||||||
|
|
||||||
jint no_compressed_bytes = 0;
|
|
||||||
switch (rv) {
|
switch (rv) {
|
||||||
// Contingency? - Report error by throwing appropriate exceptions
|
// Contingency? - Report error by throwing appropriate exceptions
|
||||||
case Z_STREAM_END:
|
case Z_STREAM_END:
|
||||||
|
|
|
@ -16,12 +16,15 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <dlfcn.h>
|
|
||||||
#include <stdio.h>
|
#include <stdio.h>
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <dlfcn.h>
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
|
#endif
|
||||||
|
|
||||||
#include "org_apache_hadoop_io_compress_zlib.h"
|
#include "org_apache_hadoop_io_compress_zlib.h"
|
||||||
#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
|
#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
|
||||||
|
|
||||||
|
@ -35,30 +38,70 @@ static jfieldID ZlibDecompressor_directBufferSize;
|
||||||
static jfieldID ZlibDecompressor_needDict;
|
static jfieldID ZlibDecompressor_needDict;
|
||||||
static jfieldID ZlibDecompressor_finished;
|
static jfieldID ZlibDecompressor_finished;
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
|
static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
|
||||||
static int (*dlsym_inflate)(z_streamp, int);
|
static int (*dlsym_inflate)(z_streamp, int);
|
||||||
static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
|
static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
|
||||||
static int (*dlsym_inflateReset)(z_streamp);
|
static int (*dlsym_inflateReset)(z_streamp);
|
||||||
static int (*dlsym_inflateEnd)(z_streamp);
|
static int (*dlsym_inflateEnd)(z_streamp);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
#include <Strsafe.h>
|
||||||
|
typedef int (__cdecl *__dlsym_inflateInit2_)(z_streamp, int, const char *, int);
|
||||||
|
typedef int (__cdecl *__dlsym_inflate)(z_streamp, int);
|
||||||
|
typedef int (__cdecl *__dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
|
||||||
|
typedef int (__cdecl *__dlsym_inflateReset)(z_streamp);
|
||||||
|
typedef int (__cdecl *__dlsym_inflateEnd)(z_streamp);
|
||||||
|
static __dlsym_inflateInit2_ dlsym_inflateInit2_;
|
||||||
|
static __dlsym_inflate dlsym_inflate;
|
||||||
|
static __dlsym_inflateSetDictionary dlsym_inflateSetDictionary;
|
||||||
|
static __dlsym_inflateReset dlsym_inflateReset;
|
||||||
|
static __dlsym_inflateEnd dlsym_inflateEnd;
|
||||||
|
extern HANDLE LoadZlibTryHadoopNativeDir();
|
||||||
|
#endif
|
||||||
|
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
|
||||||
JNIEnv *env, jclass class
|
JNIEnv *env, jclass class
|
||||||
) {
|
) {
|
||||||
// Load libz.so
|
// Load libz.so
|
||||||
|
#ifdef UNIX
|
||||||
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
|
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
|
||||||
if (!libz) {
|
if (!libz) {
|
||||||
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
|
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
HMODULE libz = LoadZlibTryHadoopNativeDir();
|
||||||
|
|
||||||
|
if (!libz) {
|
||||||
|
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
// Locate the requisite symbols from libz.so
|
// Locate the requisite symbols from libz.so
|
||||||
|
#ifdef UNIX
|
||||||
dlerror(); // Clear any existing error
|
dlerror(); // Clear any existing error
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
|
LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
|
LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
|
LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
|
LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
|
||||||
LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
|
LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_inflateInit2_, dlsym_inflateInit2_, env, libz, "inflateInit2_");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_inflate, dlsym_inflate, env, libz, "inflate");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_inflateSetDictionary, dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_inflateReset, dlsym_inflateReset, env, libz, "inflateReset");
|
||||||
|
LOAD_DYNAMIC_SYMBOL(__dlsym_inflateEnd, dlsym_inflateEnd, env, libz, "inflateEnd");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
// Initialize the requisite fieldIds
|
// Initialize the requisite fieldIds
|
||||||
ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
|
ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
|
||||||
|
@ -84,6 +127,7 @@ JNIEXPORT jlong JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
|
||||||
JNIEnv *env, jclass cls, jint windowBits
|
JNIEnv *env, jclass cls, jint windowBits
|
||||||
) {
|
) {
|
||||||
|
int rv = 0;
|
||||||
z_stream *stream = malloc(sizeof(z_stream));
|
z_stream *stream = malloc(sizeof(z_stream));
|
||||||
memset((void*)stream, 0, sizeof(z_stream));
|
memset((void*)stream, 0, sizeof(z_stream));
|
||||||
|
|
||||||
|
@ -92,7 +136,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
|
||||||
return (jlong)0;
|
return (jlong)0;
|
||||||
}
|
}
|
||||||
|
|
||||||
int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
|
rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
|
||||||
|
|
||||||
if (rv != Z_OK) {
|
if (rv != Z_OK) {
|
||||||
// Contingency - Report error by throwing appropriate exceptions
|
// Contingency - Report error by throwing appropriate exceptions
|
||||||
|
@ -121,12 +165,13 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_setDictionary(
|
||||||
JNIEnv *env, jclass cls, jlong stream,
|
JNIEnv *env, jclass cls, jlong stream,
|
||||||
jarray b, jint off, jint len
|
jarray b, jint off, jint len
|
||||||
) {
|
) {
|
||||||
|
int rv = 0;
|
||||||
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
|
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
|
||||||
if (!buf) {
|
if (!buf) {
|
||||||
THROW(env, "java/lang/InternalError", NULL);
|
THROW(env, "java/lang/InternalError", NULL);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
|
rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
|
||||||
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
|
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
|
||||||
|
|
||||||
if (rv != Z_OK) {
|
if (rv != Z_OK) {
|
||||||
|
@ -152,6 +197,16 @@ JNIEXPORT jint JNICALL
|
||||||
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
|
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
|
||||||
JNIEnv *env, jobject this
|
JNIEnv *env, jobject this
|
||||||
) {
|
) {
|
||||||
|
jobject clazz = NULL;
|
||||||
|
jarray compressed_direct_buf = NULL;
|
||||||
|
jint compressed_direct_buf_off = 0;
|
||||||
|
jint compressed_direct_buf_len = 0;
|
||||||
|
jarray uncompressed_direct_buf = NULL;
|
||||||
|
jint uncompressed_direct_buf_len = 0;
|
||||||
|
Bytef *compressed_bytes = NULL;
|
||||||
|
Bytef *uncompressed_bytes = NULL;
|
||||||
|
int rv = 0;
|
||||||
|
int no_decompressed_bytes = 0;
|
||||||
// Get members of ZlibDecompressor
|
// Get members of ZlibDecompressor
|
||||||
z_stream *stream = ZSTREAM(
|
z_stream *stream = ZSTREAM(
|
||||||
(*env)->GetLongField(env, this,
|
(*env)->GetLongField(env, this,
|
||||||
|
@ -163,23 +218,23 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get members of ZlibDecompressor
|
// Get members of ZlibDecompressor
|
||||||
jobject clazz = (*env)->GetStaticObjectField(env, this,
|
clazz = (*env)->GetStaticObjectField(env, this,
|
||||||
ZlibDecompressor_clazz);
|
ZlibDecompressor_clazz);
|
||||||
jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
|
compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
|
||||||
ZlibDecompressor_compressedDirectBuf);
|
ZlibDecompressor_compressedDirectBuf);
|
||||||
jint compressed_direct_buf_off = (*env)->GetIntField(env, this,
|
compressed_direct_buf_off = (*env)->GetIntField(env, this,
|
||||||
ZlibDecompressor_compressedDirectBufOff);
|
ZlibDecompressor_compressedDirectBufOff);
|
||||||
jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
|
compressed_direct_buf_len = (*env)->GetIntField(env, this,
|
||||||
ZlibDecompressor_compressedDirectBufLen);
|
ZlibDecompressor_compressedDirectBufLen);
|
||||||
|
|
||||||
jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
|
uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
|
||||||
ZlibDecompressor_uncompressedDirectBuf);
|
ZlibDecompressor_uncompressedDirectBuf);
|
||||||
jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
|
uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
|
||||||
ZlibDecompressor_directBufferSize);
|
ZlibDecompressor_directBufferSize);
|
||||||
|
|
||||||
// Get the input direct buffer
|
// Get the input direct buffer
|
||||||
LOCK_CLASS(env, clazz, "ZlibDecompressor");
|
LOCK_CLASS(env, clazz, "ZlibDecompressor");
|
||||||
Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env,
|
compressed_bytes = (*env)->GetDirectBufferAddress(env,
|
||||||
compressed_direct_buf);
|
compressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
|
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
|
||||||
|
|
||||||
|
@ -189,7 +244,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
|
||||||
|
|
||||||
// Get the output direct buffer
|
// Get the output direct buffer
|
||||||
LOCK_CLASS(env, clazz, "ZlibDecompressor");
|
LOCK_CLASS(env, clazz, "ZlibDecompressor");
|
||||||
Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
|
uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
|
||||||
uncompressed_direct_buf);
|
uncompressed_direct_buf);
|
||||||
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
|
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
|
||||||
|
|
||||||
|
@ -204,10 +259,9 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
|
||||||
stream->avail_out = uncompressed_direct_buf_len;
|
stream->avail_out = uncompressed_direct_buf_len;
|
||||||
|
|
||||||
// Decompress
|
// Decompress
|
||||||
int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
|
rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
|
||||||
|
|
||||||
// Contingency? - Report error by throwing appropriate exceptions
|
// Contingency? - Report error by throwing appropriate exceptions
|
||||||
int no_decompressed_bytes = 0;
|
|
||||||
switch (rv) {
|
switch (rv) {
|
||||||
case Z_STREAM_END:
|
case Z_STREAM_END:
|
||||||
{
|
{
|
||||||
|
@ -299,4 +353,3 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_end(
|
||||||
/**
|
/**
|
||||||
* vim: sw=2: ts=2: et:
|
* vim: sw=2: ts=2: et:
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
|
@ -19,14 +19,23 @@
|
||||||
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
|
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
|
||||||
#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
|
#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
|
||||||
|
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <config.h>
|
||||||
|
#include <stddef.h>
|
||||||
|
#include <zlib.h>
|
||||||
|
#include <zconf.h>
|
||||||
#include <dlfcn.h>
|
#include <dlfcn.h>
|
||||||
#include <jni.h>
|
#include <jni.h>
|
||||||
#include <stddef.h>
|
#endif
|
||||||
#include <zconf.h>
|
|
||||||
#include <zlib.h>
|
|
||||||
|
|
||||||
#include "config.h"
|
#ifdef WINDOWS
|
||||||
#include "org_apache_hadoop.h"
|
#include <jni.h>
|
||||||
|
#define HADOOP_ZLIB_LIBRARY L"zlib1.dll"
|
||||||
|
#include <zlib.h>
|
||||||
|
#include <zconf.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
|
/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
|
||||||
#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
|
#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
|
||||||
|
|
|
@ -18,6 +18,10 @@
|
||||||
|
|
||||||
#define _GNU_SOURCE
|
#define _GNU_SOURCE
|
||||||
|
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
#include "org_apache_hadoop_io_nativeio_NativeIO.h"
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
#include <assert.h>
|
#include <assert.h>
|
||||||
#include <errno.h>
|
#include <errno.h>
|
||||||
#include <fcntl.h>
|
#include <fcntl.h>
|
||||||
|
@ -31,14 +35,19 @@
|
||||||
#include <sys/syscall.h>
|
#include <sys/syscall.h>
|
||||||
#include <sys/types.h>
|
#include <sys/types.h>
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
|
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
#include "org_apache_hadoop.h"
|
#endif
|
||||||
#include "org_apache_hadoop_io_nativeio_NativeIO.h"
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
#include <assert.h>
|
||||||
|
#include <Windows.h>
|
||||||
|
#include "winutils.h"
|
||||||
|
#endif
|
||||||
|
|
||||||
#include "file_descriptor.h"
|
#include "file_descriptor.h"
|
||||||
#include "errno_enum.h"
|
#include "errno_enum.h"
|
||||||
|
|
||||||
// the NativeIO$Stat inner class and its constructor
|
// the NativeIO$POSIX$Stat inner class and its constructor
|
||||||
static jclass stat_clazz;
|
static jclass stat_clazz;
|
||||||
static jmethodID stat_ctor;
|
static jmethodID stat_ctor;
|
||||||
|
|
||||||
|
@ -53,26 +62,32 @@ static jobject pw_lock_object;
|
||||||
|
|
||||||
// Internal functions
|
// Internal functions
|
||||||
static void throw_ioe(JNIEnv* env, int errnum);
|
static void throw_ioe(JNIEnv* env, int errnum);
|
||||||
|
#ifdef UNIX
|
||||||
static ssize_t get_pw_buflen();
|
static ssize_t get_pw_buflen();
|
||||||
|
#endif
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns non-zero if the user has specified that the system
|
* Returns non-zero if the user has specified that the system
|
||||||
* has non-threadsafe implementations of getpwuid_r or getgrgid_r.
|
* has non-threadsafe implementations of getpwuid_r or getgrgid_r.
|
||||||
**/
|
**/
|
||||||
static int workaround_non_threadsafe_calls(JNIEnv *env, jclass clazz) {
|
static int workaround_non_threadsafe_calls(JNIEnv *env, jclass clazz) {
|
||||||
jfieldID needs_workaround_field = (*env)->GetStaticFieldID(env, clazz,
|
jboolean result;
|
||||||
"workaroundNonThreadSafePasswdCalls", "Z");
|
jfieldID needs_workaround_field = (*env)->GetStaticFieldID(
|
||||||
|
env, clazz,
|
||||||
|
"workaroundNonThreadSafePasswdCalls",
|
||||||
|
"Z");
|
||||||
PASS_EXCEPTIONS_RET(env, 0);
|
PASS_EXCEPTIONS_RET(env, 0);
|
||||||
assert(needs_workaround_field);
|
assert(needs_workaround_field);
|
||||||
|
|
||||||
jboolean result = (*env)->GetStaticBooleanField(
|
result = (*env)->GetStaticBooleanField(
|
||||||
env, clazz, needs_workaround_field);
|
env, clazz, needs_workaround_field);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
static void stat_init(JNIEnv *env, jclass nativeio_class) {
|
static void stat_init(JNIEnv *env, jclass nativeio_class) {
|
||||||
// Init Stat
|
// Init Stat
|
||||||
jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
|
jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat");
|
||||||
if (!clazz) {
|
if (!clazz) {
|
||||||
return; // exception has been raised
|
return; // exception has been raised
|
||||||
}
|
}
|
||||||
|
@ -85,6 +100,7 @@ static void stat_init(JNIEnv *env, jclass nativeio_class) {
|
||||||
if (!stat_ctor) {
|
if (!stat_ctor) {
|
||||||
return; // exception has been raised
|
return; // exception has been raised
|
||||||
}
|
}
|
||||||
|
|
||||||
jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
|
jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
|
||||||
if (!obj_class) {
|
if (!obj_class) {
|
||||||
return; // exception has been raised
|
return; // exception has been raised
|
||||||
|
@ -99,6 +115,7 @@ static void stat_init(JNIEnv *env, jclass nativeio_class) {
|
||||||
pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
|
pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
|
||||||
PASS_EXCEPTIONS(env);
|
PASS_EXCEPTIONS(env);
|
||||||
pw_lock_object = (*env)->NewGlobalRef(env, pw_lock_object);
|
pw_lock_object = (*env)->NewGlobalRef(env, pw_lock_object);
|
||||||
|
|
||||||
PASS_EXCEPTIONS(env);
|
PASS_EXCEPTIONS(env);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -113,6 +130,7 @@ static void stat_deinit(JNIEnv *env) {
|
||||||
pw_lock_object = NULL;
|
pw_lock_object = NULL;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
static void nioe_init(JNIEnv *env) {
|
static void nioe_init(JNIEnv *env) {
|
||||||
// Init NativeIOException
|
// Init NativeIOException
|
||||||
|
@ -121,8 +139,15 @@ static void nioe_init(JNIEnv *env) {
|
||||||
PASS_EXCEPTIONS(env);
|
PASS_EXCEPTIONS(env);
|
||||||
|
|
||||||
nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
|
nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
|
||||||
|
#ifdef UNIX
|
||||||
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
|
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
|
||||||
"(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
|
"(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
|
||||||
|
"(Ljava/lang/String;I)V");
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static void nioe_deinit(JNIEnv *env) {
|
static void nioe_deinit(JNIEnv *env) {
|
||||||
|
@ -143,32 +168,46 @@ static void nioe_deinit(JNIEnv *env) {
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
|
Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
|
||||||
JNIEnv *env, jclass clazz) {
|
JNIEnv *env, jclass clazz) {
|
||||||
|
#ifdef UNIX
|
||||||
stat_init(env, clazz);
|
stat_init(env, clazz);
|
||||||
PASS_EXCEPTIONS_GOTO(env, error);
|
PASS_EXCEPTIONS_GOTO(env, error);
|
||||||
|
#endif
|
||||||
nioe_init(env);
|
nioe_init(env);
|
||||||
PASS_EXCEPTIONS_GOTO(env, error);
|
PASS_EXCEPTIONS_GOTO(env, error);
|
||||||
fd_init(env);
|
fd_init(env);
|
||||||
PASS_EXCEPTIONS_GOTO(env, error);
|
PASS_EXCEPTIONS_GOTO(env, error);
|
||||||
|
#ifdef UNIX
|
||||||
errno_enum_init(env);
|
errno_enum_init(env);
|
||||||
PASS_EXCEPTIONS_GOTO(env, error);
|
PASS_EXCEPTIONS_GOTO(env, error);
|
||||||
|
#endif
|
||||||
return;
|
return;
|
||||||
error:
|
error:
|
||||||
// these are all idempodent and safe to call even if the
|
// these are all idempodent and safe to call even if the
|
||||||
// class wasn't initted yet
|
// class wasn't initted yet
|
||||||
|
#ifdef UNIX
|
||||||
stat_deinit(env);
|
stat_deinit(env);
|
||||||
|
#endif
|
||||||
nioe_deinit(env);
|
nioe_deinit(env);
|
||||||
fd_deinit(env);
|
fd_deinit(env);
|
||||||
|
#ifdef UNIX
|
||||||
errno_enum_deinit(env);
|
errno_enum_deinit(env);
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
|
||||||
|
* Method: fstat
|
||||||
|
* Signature: (Ljava/io/FileDescriptor;)Lorg/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat;
|
||||||
* public static native Stat fstat(FileDescriptor fd);
|
* public static native Stat fstat(FileDescriptor fd);
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT jobject JNICALL
|
JNIEXPORT jobject JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat(
|
||||||
JNIEnv *env, jclass clazz, jobject fd_object)
|
JNIEnv *env, jclass clazz, jobject fd_object)
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
jobject ret = NULL;
|
jobject ret = NULL;
|
||||||
|
|
||||||
int fd = fd_get(env, fd_object);
|
int fd = fd_get(env, fd_object);
|
||||||
|
@ -187,14 +226,26 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
|
||||||
|
|
||||||
cleanup:
|
cleanup:
|
||||||
return ret;
|
return ret;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function POSIX.fstat() is not supported on Windows");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* public static native void posix_fadvise(
|
* public static native void posix_fadvise(
|
||||||
* FileDescriptor fd, long offset, long len, int flags);
|
* FileDescriptor fd, long offset, long len, int flags);
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_posix_1fadvise(
|
||||||
JNIEnv *env, jclass clazz,
|
JNIEnv *env, jclass clazz,
|
||||||
jobject fd_object, jlong offset, jlong len, jint flags)
|
jobject fd_object, jlong offset, jlong len, jint flags)
|
||||||
{
|
{
|
||||||
|
@ -240,9 +291,12 @@ static int manual_sync_file_range (int fd, __off64_t from, __off64_t to, unsigne
|
||||||
/**
|
/**
|
||||||
* public static native void sync_file_range(
|
* public static native void sync_file_range(
|
||||||
* FileDescriptor fd, long offset, long len, int flags);
|
* FileDescriptor fd, long offset, long len, int flags);
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_sync_1file_1range(
|
||||||
JNIEnv *env, jclass clazz,
|
JNIEnv *env, jclass clazz,
|
||||||
jobject fd_object, jlong offset, jlong len, jint flags)
|
jobject fd_object, jlong offset, jlong len, jint flags)
|
||||||
{
|
{
|
||||||
|
@ -284,13 +338,20 @@ static int toFreeBSDFlags(int flags)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
|
||||||
|
* Method: open
|
||||||
|
* Signature: (Ljava/lang/String;II)Ljava/io/FileDescriptor;
|
||||||
* public static native FileDescriptor open(String path, int flags, int mode);
|
* public static native FileDescriptor open(String path, int flags, int mode);
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT jobject JNICALL
|
JNIEXPORT jobject JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_open(
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_open(
|
||||||
JNIEnv *env, jclass clazz, jstring j_path,
|
JNIEnv *env, jclass clazz, jstring j_path,
|
||||||
jint flags, jint mode)
|
jint flags, jint mode)
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
#ifdef __FreeBSD__
|
#ifdef __FreeBSD__
|
||||||
flags = toFreeBSDFlags(flags);
|
flags = toFreeBSDFlags(flags);
|
||||||
#endif
|
#endif
|
||||||
|
@ -318,16 +379,90 @@ cleanup:
|
||||||
(*env)->ReleaseStringUTFChars(env, j_path, path);
|
(*env)->ReleaseStringUTFChars(env, j_path, path);
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function POSIX.open() is not supported on Windows");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/*
|
||||||
* public static native void chmod(String path, int mode) throws IOException;
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
|
||||||
|
* Method: createFile
|
||||||
|
* Signature: (Ljava/lang/String;JJJ)Ljava/io/FileDescriptor;
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFile
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
|
(JNIEnv *env, jclass clazz, jstring j_path,
|
||||||
JNIEnv *env, jclass clazz, jstring j_path,
|
jlong desiredAccess, jlong shareMode, jlong creationDisposition)
|
||||||
jint mode)
|
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function Windows.createFile() is not supported on Unix");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
BOOL isSymlink = FALSE;
|
||||||
|
BOOL isJunction = FALSE;
|
||||||
|
DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS;
|
||||||
|
jobject ret = (jobject) NULL;
|
||||||
|
HANDLE hFile = INVALID_HANDLE_VALUE;
|
||||||
|
WCHAR *path = (WCHAR *) (*env)->GetStringChars(env, j_path, (jboolean*)NULL);
|
||||||
|
if (path == NULL) goto cleanup;
|
||||||
|
|
||||||
|
// Set the flag for a symbolic link or a junctions point only when it exists.
|
||||||
|
// According to MSDN if the call to CreateFile() function creates a file,
|
||||||
|
// there is no change in behavior. So we do not throw if no file is found.
|
||||||
|
//
|
||||||
|
dwRtnCode = SymbolicLinkCheck(path, &isSymlink);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
|
||||||
|
throw_ioe(env, dwRtnCode);
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
dwRtnCode = JunctionPointCheck(path, &isJunction);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
|
||||||
|
throw_ioe(env, dwRtnCode);
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
if (isSymlink || isJunction)
|
||||||
|
dwFlagsAndAttributes |= FILE_FLAG_OPEN_REPARSE_POINT;
|
||||||
|
|
||||||
|
hFile = CreateFile(path,
|
||||||
|
(DWORD) desiredAccess,
|
||||||
|
(DWORD) shareMode,
|
||||||
|
(LPSECURITY_ATTRIBUTES ) NULL,
|
||||||
|
(DWORD) creationDisposition,
|
||||||
|
dwFlagsAndAttributes,
|
||||||
|
NULL);
|
||||||
|
if (hFile == INVALID_HANDLE_VALUE) {
|
||||||
|
throw_ioe(env, GetLastError());
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = fd_create(env, (long) hFile);
|
||||||
|
cleanup:
|
||||||
|
if (path != NULL) {
|
||||||
|
(*env)->ReleaseStringChars(env, j_path, (const jchar*)path);
|
||||||
|
}
|
||||||
|
return (jobject) ret;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
|
||||||
|
* Method: chmod
|
||||||
|
* Signature: (Ljava/lang/String;I)V
|
||||||
|
*/
|
||||||
|
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_chmodImpl
|
||||||
|
(JNIEnv *env, jclass clazz, jstring j_path, jint mode)
|
||||||
|
{
|
||||||
|
#ifdef UNIX
|
||||||
const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
|
const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
|
||||||
if (path == NULL) return; // JVM throws Exception for us
|
if (path == NULL) return; // JVM throws Exception for us
|
||||||
|
|
||||||
|
@ -336,15 +471,30 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
|
||||||
}
|
}
|
||||||
|
|
||||||
(*env)->ReleaseStringUTFChars(env, j_path, path);
|
(*env)->ReleaseStringUTFChars(env, j_path, path);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
LPCWSTR path = (LPCWSTR) (*env)->GetStringChars(env, j_path, NULL);
|
||||||
|
if (path == NULL) return; // JVM throws Exception for us
|
||||||
|
|
||||||
|
if ((dwRtnCode = ChangeFileModeByMask((LPCWSTR) path, mode)) != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
throw_ioe(env, dwRtnCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
(*env)->ReleaseStringChars(env, j_path, (const jchar*) path);
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* static native String getUserName(int uid);
|
* static native String getUserName(int uid);
|
||||||
*/
|
*/
|
||||||
JNIEXPORT jstring JNICALL
|
JNIEXPORT jstring JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env,
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUserName(
|
||||||
jclass clazz, jint uid)
|
JNIEnv *env, jclass clazz, jint uid)
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
int pw_lock_locked = 0;
|
int pw_lock_locked = 0;
|
||||||
if (pw_lock_object != NULL) {
|
if (pw_lock_object != NULL) {
|
||||||
if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
|
if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
|
||||||
|
@ -396,15 +546,26 @@ cleanup:
|
||||||
}
|
}
|
||||||
if (pw_buf != NULL) free(pw_buf);
|
if (pw_buf != NULL) free(pw_buf);
|
||||||
return jstr_username;
|
return jstr_username;
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function POSIX.getUserName() is not supported on Windows");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* static native String getGroupName(int gid);
|
* static native String getGroupName(int gid);
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
*/
|
*/
|
||||||
JNIEXPORT jstring JNICALL
|
JNIEXPORT jstring JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env,
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getGroupName(
|
||||||
jclass clazz, jint gid)
|
JNIEnv *env, jclass clazz, jint gid)
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
int pw_lock_locked = 0;
|
int pw_lock_locked = 0;
|
||||||
|
|
||||||
if (pw_lock_object != NULL) {
|
if (pw_lock_object != NULL) {
|
||||||
|
@ -458,14 +619,21 @@ cleanup:
|
||||||
}
|
}
|
||||||
if (pw_buf != NULL) free(pw_buf);
|
if (pw_buf != NULL) free(pw_buf);
|
||||||
return jstr_groupname;
|
return jstr_groupname;
|
||||||
}
|
#endif // UNIX
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function POSIX.getUserName() is not supported on Windows");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Throw a java.IO.IOException, generating the message from errno.
|
* Throw a java.IO.IOException, generating the message from errno.
|
||||||
*/
|
*/
|
||||||
static void throw_ioe(JNIEnv* env, int errnum)
|
static void throw_ioe(JNIEnv* env, int errnum)
|
||||||
{
|
{
|
||||||
|
#ifdef UNIX
|
||||||
char message[80];
|
char message[80];
|
||||||
jstring jstr_message;
|
jstring jstr_message;
|
||||||
|
|
||||||
|
@ -490,9 +658,51 @@ static void throw_ioe(JNIEnv* env, int errnum)
|
||||||
err:
|
err:
|
||||||
if (jstr_message != NULL)
|
if (jstr_message != NULL)
|
||||||
(*env)->ReleaseStringUTFChars(env, jstr_message, message);
|
(*env)->ReleaseStringUTFChars(env, jstr_message, message);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
DWORD len = 0;
|
||||||
|
LPWSTR buffer = NULL;
|
||||||
|
const jchar* message = NULL;
|
||||||
|
jstring jstr_message = NULL;
|
||||||
|
jthrowable obj = NULL;
|
||||||
|
|
||||||
|
len = FormatMessageW(
|
||||||
|
FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
|
||||||
|
NULL, *(DWORD*) (&errnum), // reinterpret cast
|
||||||
|
MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
|
||||||
|
(LPWSTR) &buffer, 0, NULL);
|
||||||
|
|
||||||
|
if (len > 0)
|
||||||
|
{
|
||||||
|
message = (const jchar*) buffer;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
message = (const jchar*) L"Unknown error.";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if ((jstr_message = (*env)->NewString(env, message, len)) == NULL)
|
||||||
|
goto err;
|
||||||
|
LocalFree(buffer);
|
||||||
|
buffer = NULL; // Set buffer to NULL to avoid double free
|
||||||
|
|
||||||
|
obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
|
||||||
|
jstr_message, errnum);
|
||||||
|
if (obj == NULL) goto err;
|
||||||
|
|
||||||
|
(*env)->Throw(env, obj);
|
||||||
|
return;
|
||||||
|
|
||||||
|
err:
|
||||||
|
if (jstr_message != NULL)
|
||||||
|
(*env)->ReleaseStringChars(env, jstr_message, message);
|
||||||
|
LocalFree(buffer);
|
||||||
|
return;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
/*
|
/*
|
||||||
* Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
|
* Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
|
||||||
*/
|
*/
|
||||||
|
@ -503,6 +713,104 @@ ssize_t get_pw_buflen() {
|
||||||
#endif
|
#endif
|
||||||
return (ret > 512) ? ret : 512;
|
return (ret > 512) ? ret : 512;
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
|
||||||
|
* Method: getOwnerOnWindows
|
||||||
|
* Signature: (Ljava/io/FileDescriptor;)Ljava/lang/String;
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
|
*/
|
||||||
|
JNIEXPORT jstring JNICALL
|
||||||
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getOwner
|
||||||
|
(JNIEnv *env, jclass clazz, jobject fd_object)
|
||||||
|
{
|
||||||
|
#ifdef UNIX
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function Windows.getOwner() is not supported on Unix");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
PSID pSidOwner = NULL;
|
||||||
|
PSECURITY_DESCRIPTOR pSD = NULL;
|
||||||
|
LPWSTR ownerName = (LPWSTR)NULL;
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
jstring jstr_username = NULL;
|
||||||
|
HANDLE hFile = (HANDLE) fd_get(env, fd_object);
|
||||||
|
PASS_EXCEPTIONS_GOTO(env, cleanup);
|
||||||
|
|
||||||
|
dwRtnCode = GetSecurityInfo(
|
||||||
|
hFile,
|
||||||
|
SE_FILE_OBJECT,
|
||||||
|
OWNER_SECURITY_INFORMATION,
|
||||||
|
&pSidOwner,
|
||||||
|
NULL,
|
||||||
|
NULL,
|
||||||
|
NULL,
|
||||||
|
&pSD);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS) {
|
||||||
|
throw_ioe(env, dwRtnCode);
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
dwRtnCode = GetAccntNameFromSid(pSidOwner, &ownerName);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS) {
|
||||||
|
throw_ioe(env, dwRtnCode);
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
jstr_username = (*env)->NewString(env, ownerName, (jsize) wcslen(ownerName));
|
||||||
|
if (jstr_username == NULL) goto cleanup;
|
||||||
|
|
||||||
|
cleanup:
|
||||||
|
LocalFree(ownerName);
|
||||||
|
LocalFree(pSD);
|
||||||
|
return jstr_username;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
|
||||||
|
* Method: setFilePointer
|
||||||
|
* Signature: (Ljava/io/FileDescriptor;JJ)J
|
||||||
|
*
|
||||||
|
* The "00024" in the function name is an artifact of how JNI encodes
|
||||||
|
* special characters. U+0024 is '$'.
|
||||||
|
*/
|
||||||
|
JNIEXPORT jlong JNICALL
|
||||||
|
Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_setFilePointer
|
||||||
|
(JNIEnv *env, jclass clazz, jobject fd_object, jlong distanceToMove, jlong moveMethod)
|
||||||
|
{
|
||||||
|
#ifdef UNIX
|
||||||
|
THROW(env, "java/io/IOException",
|
||||||
|
"The function setFilePointer(FileDescriptor) is not supported on Unix");
|
||||||
|
return NULL;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
DWORD distanceToMoveLow = (DWORD) distanceToMove;
|
||||||
|
LONG distanceToMoveHigh = (LONG) (distanceToMove >> 32);
|
||||||
|
DWORD distanceMovedLow = 0;
|
||||||
|
HANDLE hFile = (HANDLE) fd_get(env, fd_object);
|
||||||
|
PASS_EXCEPTIONS_GOTO(env, cleanup);
|
||||||
|
|
||||||
|
distanceMovedLow = SetFilePointer(hFile,
|
||||||
|
distanceToMoveLow, &distanceToMoveHigh, (DWORD) moveMethod);
|
||||||
|
|
||||||
|
if (distanceMovedLow == INVALID_SET_FILE_POINTER) {
|
||||||
|
throw_ioe(env, GetLastError());
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup:
|
||||||
|
|
||||||
|
return ((jlong) distanceToMoveHigh << 32) | (jlong) distanceMovedLow;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
JNIEXPORT void JNICALL
|
JNIEXPORT void JNICALL
|
||||||
Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env,
|
Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env,
|
||||||
|
|
|
@ -26,6 +26,10 @@ static jfieldID fd_descriptor;
|
||||||
// the no-argument constructor
|
// the no-argument constructor
|
||||||
static jmethodID fd_constructor;
|
static jmethodID fd_constructor;
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
// the internal field for the long handle
|
||||||
|
static jfieldID fd_handle;
|
||||||
|
#endif
|
||||||
|
|
||||||
void fd_init(JNIEnv* env)
|
void fd_init(JNIEnv* env)
|
||||||
{
|
{
|
||||||
|
@ -37,6 +41,12 @@ void fd_init(JNIEnv* env)
|
||||||
|
|
||||||
fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
|
fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
|
||||||
PASS_EXCEPTIONS(env);
|
PASS_EXCEPTIONS(env);
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
|
||||||
|
PASS_EXCEPTIONS(env);
|
||||||
|
#endif
|
||||||
|
|
||||||
fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
|
fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,9 +56,13 @@ void fd_deinit(JNIEnv *env) {
|
||||||
fd_class = NULL;
|
fd_class = NULL;
|
||||||
}
|
}
|
||||||
fd_descriptor = NULL;
|
fd_descriptor = NULL;
|
||||||
|
#ifdef WINDOWS
|
||||||
|
fd_handle = NULL;
|
||||||
|
#endif
|
||||||
fd_constructor = NULL;
|
fd_constructor = NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
/*
|
/*
|
||||||
* Given an instance 'obj' of java.io.FileDescriptor, return the
|
* Given an instance 'obj' of java.io.FileDescriptor, return the
|
||||||
* underlying fd, or throw if unavailable
|
* underlying fd, or throw if unavailable
|
||||||
|
@ -72,3 +86,30 @@ jobject fd_create(JNIEnv *env, int fd) {
|
||||||
(*env)->SetIntField(env, obj, fd_descriptor, fd);
|
(*env)->SetIntField(env, obj, fd_descriptor, fd);
|
||||||
return obj;
|
return obj;
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
/*
|
||||||
|
* Given an instance 'obj' of java.io.FileDescriptor, return the
|
||||||
|
* underlying fd, or throw if unavailable
|
||||||
|
*/
|
||||||
|
long fd_get(JNIEnv* env, jobject obj) {
|
||||||
|
if (obj == NULL) {
|
||||||
|
THROW(env, "java/lang/NullPointerException",
|
||||||
|
"FileDescriptor object is null");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
return (long) (*env)->GetLongField(env, obj, fd_handle);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Create a FileDescriptor object corresponding to the given int fd
|
||||||
|
*/
|
||||||
|
jobject fd_create(JNIEnv *env, long fd) {
|
||||||
|
jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
|
||||||
|
PASS_EXCEPTIONS_RET(env, (jobject) NULL);
|
||||||
|
|
||||||
|
(*env)->SetLongField(env, obj, fd_handle, fd);
|
||||||
|
return obj;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
|
@ -18,11 +18,19 @@
|
||||||
#define FILE_DESCRIPTOR_H
|
#define FILE_DESCRIPTOR_H
|
||||||
|
|
||||||
#include <jni.h>
|
#include <jni.h>
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
|
||||||
void fd_init(JNIEnv *env);
|
void fd_init(JNIEnv *env);
|
||||||
void fd_deinit(JNIEnv *env);
|
void fd_deinit(JNIEnv *env);
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
int fd_get(JNIEnv* env, jobject obj);
|
int fd_get(JNIEnv* env, jobject obj);
|
||||||
jobject fd_create(JNIEnv *env, int fd);
|
jobject fd_create(JNIEnv *env, int fd);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WINDOWS
|
||||||
|
long fd_get(JNIEnv* env, jobject obj);
|
||||||
|
jobject fd_create(JNIEnv *env, long fd);
|
||||||
|
#endif
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -0,0 +1,131 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
#include <jni.h>
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
#include "org_apache_hadoop_security_JniBasedUnixGroupsMapping.h"
|
||||||
|
|
||||||
|
#include <assert.h>
|
||||||
|
#include <Windows.h>
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
static jobjectArray emptyGroups = NULL;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Throw a java.IO.IOException, generating the message from errno.
|
||||||
|
*/
|
||||||
|
static void throw_ioexception(JNIEnv* env, DWORD errnum)
|
||||||
|
{
|
||||||
|
DWORD len = 0;
|
||||||
|
LPSTR buffer = NULL;
|
||||||
|
const char* message = NULL;
|
||||||
|
|
||||||
|
len = FormatMessageA(
|
||||||
|
FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
|
||||||
|
NULL, *(DWORD*) (&errnum), // reinterpret cast
|
||||||
|
MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
|
||||||
|
(LPSTR*)&buffer, 0, NULL);
|
||||||
|
|
||||||
|
if (len > 0)
|
||||||
|
{
|
||||||
|
message = buffer;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
message = "Unknown error.";
|
||||||
|
}
|
||||||
|
|
||||||
|
THROW(env, "java/io/IOException", message);
|
||||||
|
|
||||||
|
LocalFree(buffer);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
JNIEXPORT jobjectArray JNICALL
|
||||||
|
Java_org_apache_hadoop_security_JniBasedUnixGroupsMapping_getGroupForUser
|
||||||
|
(JNIEnv *env, jobject jobj, jstring juser) {
|
||||||
|
const WCHAR *user = NULL;
|
||||||
|
jobjectArray jgroups = NULL;
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
|
||||||
|
LPLOCALGROUP_USERS_INFO_0 groups = NULL;
|
||||||
|
LPLOCALGROUP_USERS_INFO_0 tmpGroups = NULL;
|
||||||
|
DWORD ngroups = 0;
|
||||||
|
|
||||||
|
int i;
|
||||||
|
|
||||||
|
if (emptyGroups == NULL) {
|
||||||
|
jobjectArray lEmptyGroups = (jobjectArray)(*env)->NewObjectArray(env, 0,
|
||||||
|
(*env)->FindClass(env, "java/lang/String"), NULL);
|
||||||
|
if (lEmptyGroups == NULL) {
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
emptyGroups = (*env)->NewGlobalRef(env, lEmptyGroups);
|
||||||
|
if (emptyGroups == NULL) {
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
user = (*env)->GetStringChars(env, juser, NULL);
|
||||||
|
if (user == NULL) {
|
||||||
|
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for user buffer");
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
dwRtnCode = GetLocalGroupsForUser(user, &groups, &ngroups);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS) {
|
||||||
|
throw_ioexception(env, dwRtnCode);
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
jgroups = (jobjectArray)(*env)->NewObjectArray(env, ngroups,
|
||||||
|
(*env)->FindClass(env, "java/lang/String"), NULL);
|
||||||
|
if (jgroups == NULL) {
|
||||||
|
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for group buffer");
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
|
||||||
|
// use a tmp pointer to iterate over groups and keep the original pointer
|
||||||
|
// for memory deallocation
|
||||||
|
tmpGroups = groups;
|
||||||
|
|
||||||
|
// fill the output string array
|
||||||
|
for (i = 0; i < ngroups; i++) {
|
||||||
|
jsize groupStringLen = (jsize)wcslen(tmpGroups->lgrui0_name);
|
||||||
|
jstring jgrp = (*env)->NewString(env, tmpGroups->lgrui0_name, groupStringLen);
|
||||||
|
if (jgrp == NULL) {
|
||||||
|
THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for groups buffer");
|
||||||
|
goto cleanup;
|
||||||
|
}
|
||||||
|
(*env)->SetObjectArrayElement(env, jgroups, i, jgrp);
|
||||||
|
// move on to the next group
|
||||||
|
tmpGroups++;
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup:
|
||||||
|
if (groups != NULL) NetApiBufferFree(groups);
|
||||||
|
|
||||||
|
if (user != NULL) {
|
||||||
|
(*env)->ReleaseStringChars(env, juser, user);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dwRtnCode == ERROR_SUCCESS) {
|
||||||
|
return jgroups;
|
||||||
|
} else {
|
||||||
|
return emptyGroups;
|
||||||
|
}
|
||||||
|
}
|
|
@ -16,7 +16,11 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
#include <jni.h>
|
#include <jni.h>
|
||||||
|
|
||||||
|
|
|
@ -16,18 +16,22 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <arpa/inet.h>
|
#include "org_apache_hadoop.h"
|
||||||
|
#include "org_apache_hadoop_util_NativeCrc32.h"
|
||||||
|
|
||||||
#include <assert.h>
|
#include <assert.h>
|
||||||
#include <inttypes.h>
|
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
#include <unistd.h>
|
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <inttypes.h>
|
||||||
|
#include <arpa/inet.h>
|
||||||
|
#include <unistd.h>
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
#include "org_apache_hadoop.h"
|
|
||||||
#include "org_apache_hadoop_util_NativeCrc32.h"
|
|
||||||
#include "gcc_optimizations.h"
|
#include "gcc_optimizations.h"
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
#include "bulk_crc32.h"
|
#include "bulk_crc32.h"
|
||||||
|
|
||||||
static void throw_checksum_exception(JNIEnv *env,
|
static void throw_checksum_exception(JNIEnv *env,
|
||||||
|
@ -36,6 +40,9 @@ static void throw_checksum_exception(JNIEnv *env,
|
||||||
char message[1024];
|
char message[1024];
|
||||||
jstring jstr_message;
|
jstring jstr_message;
|
||||||
char *filename;
|
char *filename;
|
||||||
|
jclass checksum_exception_clazz;
|
||||||
|
jmethodID checksum_exception_ctor;
|
||||||
|
jthrowable obj;
|
||||||
|
|
||||||
// Get filename as C string, or "null" if not provided
|
// Get filename as C string, or "null" if not provided
|
||||||
if (j_filename == NULL) {
|
if (j_filename == NULL) {
|
||||||
|
@ -50,28 +57,38 @@ static void throw_checksum_exception(JNIEnv *env,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Format error message
|
// Format error message
|
||||||
|
#ifdef WINDOWS
|
||||||
|
_snprintf_s(
|
||||||
|
message,
|
||||||
|
sizeof(message),
|
||||||
|
_TRUNCATE,
|
||||||
|
"Checksum error: %s at %I64d exp: %d got: %d",
|
||||||
|
filename, pos, expected_crc, got_crc);
|
||||||
|
#else
|
||||||
snprintf(message, sizeof(message),
|
snprintf(message, sizeof(message),
|
||||||
"Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
|
"Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
|
||||||
filename, pos, expected_crc, got_crc);
|
filename, pos, expected_crc, got_crc);
|
||||||
|
#endif // WINDOWS
|
||||||
|
|
||||||
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
|
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
|
||||||
goto cleanup;
|
goto cleanup;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Throw exception
|
// Throw exception
|
||||||
jclass checksum_exception_clazz = (*env)->FindClass(
|
checksum_exception_clazz = (*env)->FindClass(
|
||||||
env, "org/apache/hadoop/fs/ChecksumException");
|
env, "org/apache/hadoop/fs/ChecksumException");
|
||||||
if (checksum_exception_clazz == NULL) {
|
if (checksum_exception_clazz == NULL) {
|
||||||
goto cleanup;
|
goto cleanup;
|
||||||
}
|
}
|
||||||
|
|
||||||
jmethodID checksum_exception_ctor = (*env)->GetMethodID(env,
|
checksum_exception_ctor = (*env)->GetMethodID(env,
|
||||||
checksum_exception_clazz, "<init>",
|
checksum_exception_clazz, "<init>",
|
||||||
"(Ljava/lang/String;J)V");
|
"(Ljava/lang/String;J)V");
|
||||||
if (checksum_exception_ctor == NULL) {
|
if (checksum_exception_ctor == NULL) {
|
||||||
goto cleanup;
|
goto cleanup;
|
||||||
}
|
}
|
||||||
|
|
||||||
jthrowable obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
|
obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
|
||||||
checksum_exception_ctor, jstr_message, pos);
|
checksum_exception_ctor, jstr_message, pos);
|
||||||
if (obj == NULL) goto cleanup;
|
if (obj == NULL) goto cleanup;
|
||||||
|
|
||||||
|
@ -103,6 +120,14 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
|
||||||
jobject j_data, jint data_offset, jint data_len,
|
jobject j_data, jint data_offset, jint data_len,
|
||||||
jstring j_filename, jlong base_pos)
|
jstring j_filename, jlong base_pos)
|
||||||
{
|
{
|
||||||
|
uint8_t *sums_addr;
|
||||||
|
uint8_t *data_addr;
|
||||||
|
uint32_t *sums;
|
||||||
|
uint8_t *data;
|
||||||
|
int crc_type;
|
||||||
|
crc32_error_t error_data;
|
||||||
|
int ret;
|
||||||
|
|
||||||
if (unlikely(!j_sums || !j_data)) {
|
if (unlikely(!j_sums || !j_data)) {
|
||||||
THROW(env, "java/lang/NullPointerException",
|
THROW(env, "java/lang/NullPointerException",
|
||||||
"input ByteBuffers must not be null");
|
"input ByteBuffers must not be null");
|
||||||
|
@ -110,8 +135,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert direct byte buffers to C pointers
|
// Convert direct byte buffers to C pointers
|
||||||
uint8_t *sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
|
sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
|
||||||
uint8_t *data_addr = (*env)->GetDirectBufferAddress(env, j_data);
|
data_addr = (*env)->GetDirectBufferAddress(env, j_data);
|
||||||
|
|
||||||
if (unlikely(!sums_addr || !data_addr)) {
|
if (unlikely(!sums_addr || !data_addr)) {
|
||||||
THROW(env, "java/lang/IllegalArgumentException",
|
THROW(env, "java/lang/IllegalArgumentException",
|
||||||
|
@ -129,16 +154,15 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t *sums = (uint32_t *)(sums_addr + sums_offset);
|
sums = (uint32_t *)(sums_addr + sums_offset);
|
||||||
uint8_t *data = data_addr + data_offset;
|
data = data_addr + data_offset;
|
||||||
|
|
||||||
// Convert to correct internal C constant for CRC type
|
// Convert to correct internal C constant for CRC type
|
||||||
int crc_type = convert_java_crc_type(env, j_crc_type);
|
crc_type = convert_java_crc_type(env, j_crc_type);
|
||||||
if (crc_type == -1) return; // exception already thrown
|
if (crc_type == -1) return; // exception already thrown
|
||||||
|
|
||||||
// Setup complete. Actually verify checksums.
|
// Setup complete. Actually verify checksums.
|
||||||
crc32_error_t error_data;
|
ret = bulk_verify_crc(data, data_len, sums, crc_type,
|
||||||
int ret = bulk_verify_crc(data, data_len, sums, crc_type,
|
|
||||||
bytes_per_checksum, &error_data);
|
bytes_per_checksum, &error_data);
|
||||||
if (likely(ret == CHECKSUMS_VALID)) {
|
if (likely(ret == CHECKSUMS_VALID)) {
|
||||||
return;
|
return;
|
||||||
|
|
|
@ -21,25 +21,31 @@
|
||||||
* All rights reserved. Use of this source code is governed by a
|
* All rights reserved. Use of this source code is governed by a
|
||||||
* BSD-style license that can be found in the LICENSE file.
|
* BSD-style license that can be found in the LICENSE file.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
|
||||||
#include <assert.h>
|
#include <assert.h>
|
||||||
#include <arpa/inet.h>
|
|
||||||
#include <errno.h>
|
#include <errno.h>
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <arpa/inet.h>
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
#include "crc32_zlib_polynomial_tables.h"
|
#include "crc32_zlib_polynomial_tables.h"
|
||||||
#include "crc32c_tables.h"
|
#include "crc32c_tables.h"
|
||||||
#include "bulk_crc32.h"
|
#include "bulk_crc32.h"
|
||||||
#include "gcc_optimizations.h"
|
#include "gcc_optimizations.h"
|
||||||
|
|
||||||
#ifndef __FreeBSD__
|
#if (!defined(__FreeBSD__) && !defined(WINDOWS))
|
||||||
#define USE_PIPELINED
|
#define USE_PIPELINED
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#define CRC_INITIAL_VAL 0xffffffff
|
#define CRC_INITIAL_VAL 0xffffffff
|
||||||
|
|
||||||
typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
|
typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
|
||||||
static inline uint32_t crc_val(uint32_t crc);
|
static uint32_t crc_val(uint32_t crc);
|
||||||
static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
|
static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
|
||||||
static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
|
static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
|
||||||
|
|
||||||
|
@ -187,7 +193,7 @@ return_crc_error:
|
||||||
/**
|
/**
|
||||||
* Extract the final result of a CRC
|
* Extract the final result of a CRC
|
||||||
*/
|
*/
|
||||||
static inline uint32_t crc_val(uint32_t crc) {
|
uint32_t crc_val(uint32_t crc) {
|
||||||
return ~crc;
|
return ~crc;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -200,11 +206,13 @@ static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length) {
|
||||||
uint32_t end_bytes = length - running_length;
|
uint32_t end_bytes = length - running_length;
|
||||||
int li;
|
int li;
|
||||||
for (li=0; li < running_length/8; li++) {
|
for (li=0; li < running_length/8; li++) {
|
||||||
|
uint32_t term1;
|
||||||
|
uint32_t term2;
|
||||||
crc ^= *(uint32_t *)buf;
|
crc ^= *(uint32_t *)buf;
|
||||||
buf += 4;
|
buf += 4;
|
||||||
uint32_t term1 = CRC32C_T8_7[crc & 0x000000FF] ^
|
term1 = CRC32C_T8_7[crc & 0x000000FF] ^
|
||||||
CRC32C_T8_6[(crc >> 8) & 0x000000FF];
|
CRC32C_T8_6[(crc >> 8) & 0x000000FF];
|
||||||
uint32_t term2 = crc >> 16;
|
term2 = crc >> 16;
|
||||||
crc = term1 ^
|
crc = term1 ^
|
||||||
CRC32C_T8_5[term2 & 0x000000FF] ^
|
CRC32C_T8_5[term2 & 0x000000FF] ^
|
||||||
CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
|
CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
|
||||||
|
@ -234,11 +242,13 @@ static uint32_t crc32_zlib_sb8(
|
||||||
uint32_t end_bytes = length - running_length;
|
uint32_t end_bytes = length - running_length;
|
||||||
int li;
|
int li;
|
||||||
for (li=0; li < running_length/8; li++) {
|
for (li=0; li < running_length/8; li++) {
|
||||||
|
uint32_t term1;
|
||||||
|
uint32_t term2;
|
||||||
crc ^= *(uint32_t *)buf;
|
crc ^= *(uint32_t *)buf;
|
||||||
buf += 4;
|
buf += 4;
|
||||||
uint32_t term1 = CRC32_T8_7[crc & 0x000000FF] ^
|
term1 = CRC32_T8_7[crc & 0x000000FF] ^
|
||||||
CRC32_T8_6[(crc >> 8) & 0x000000FF];
|
CRC32_T8_6[(crc >> 8) & 0x000000FF];
|
||||||
uint32_t term2 = crc >> 16;
|
term2 = crc >> 16;
|
||||||
crc = term1 ^
|
crc = term1 ^
|
||||||
CRC32_T8_5[term2 & 0x000000FF] ^
|
CRC32_T8_5[term2 & 0x000000FF] ^
|
||||||
CRC32_T8_4[(term2 >> 8) & 0x000000FF];
|
CRC32_T8_4[(term2 >> 8) & 0x000000FF];
|
||||||
|
|
|
@ -19,7 +19,10 @@
|
||||||
#define BULK_CRC32_H_INCLUDED
|
#define BULK_CRC32_H_INCLUDED
|
||||||
|
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
|
|
||||||
|
#ifdef UNIX
|
||||||
#include <unistd.h> /* for size_t */
|
#include <unistd.h> /* for size_t */
|
||||||
|
#endif // UNIX
|
||||||
|
|
||||||
// Constants for different CRC algorithms
|
// Constants for different CRC algorithms
|
||||||
#define CRC32C_POLYNOMIAL 1
|
#define CRC32C_POLYNOMIAL 1
|
||||||
|
|
|
@ -24,10 +24,13 @@
|
||||||
#if !defined ORG_APACHE_HADOOP_H
|
#if !defined ORG_APACHE_HADOOP_H
|
||||||
#define ORG_APACHE_HADOOP_H
|
#define ORG_APACHE_HADOOP_H
|
||||||
|
|
||||||
#include <dlfcn.h>
|
#if defined(_WIN32)
|
||||||
#include <jni.h>
|
#undef UNIX
|
||||||
|
#define WINDOWS
|
||||||
#include "config.h"
|
#else
|
||||||
|
#undef WINDOWS
|
||||||
|
#define UNIX
|
||||||
|
#endif
|
||||||
|
|
||||||
/* A helper macro to 'throw' a java exception. */
|
/* A helper macro to 'throw' a java exception. */
|
||||||
#define THROW(env, exception_name, message) \
|
#define THROW(env, exception_name, message) \
|
||||||
|
@ -55,6 +58,14 @@
|
||||||
if ((*env)->ExceptionCheck(env)) return (ret); \
|
if ((*env)->ExceptionCheck(env)) return (ret); \
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unix definitions
|
||||||
|
*/
|
||||||
|
#ifdef UNIX
|
||||||
|
#include <config.h>
|
||||||
|
#include <dlfcn.h>
|
||||||
|
#include <jni.h>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A helper function to dlsym a 'symbol' from a given library-handle.
|
* A helper function to dlsym a 'symbol' from a given library-handle.
|
||||||
*
|
*
|
||||||
|
@ -84,6 +95,76 @@ void *do_dlsym(JNIEnv *env, void *handle, const char *symbol) {
|
||||||
if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
|
if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
|
||||||
return; \
|
return; \
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
// Unix part end
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Windows definitions
|
||||||
|
*/
|
||||||
|
#ifdef WINDOWS
|
||||||
|
|
||||||
|
/* Force using Unicode throughout the code */
|
||||||
|
#ifndef UNICODE
|
||||||
|
#define UNICODE
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/* Microsoft C Compiler does not support the C99 inline keyword */
|
||||||
|
#ifndef __cplusplus
|
||||||
|
#define inline __inline;
|
||||||
|
#endif // _cplusplus
|
||||||
|
|
||||||
|
/* Optimization macros supported by GCC but for which there is no
|
||||||
|
direct equivalent in the Microsoft C compiler */
|
||||||
|
#define likely(_c) (_c)
|
||||||
|
#define unlikely(_c) (_c)
|
||||||
|
|
||||||
|
/* Disable certain warnings in the native CRC32 code. */
|
||||||
|
#pragma warning(disable:4018) // Signed/unsigned mismatch.
|
||||||
|
#pragma warning(disable:4244) // Possible loss of data in conversion.
|
||||||
|
#pragma warning(disable:4267) // Possible loss of data.
|
||||||
|
#pragma warning(disable:4996) // Use of deprecated function.
|
||||||
|
|
||||||
|
#include <Windows.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <jni.h>
|
||||||
|
|
||||||
|
#define snprintf(a, b ,c, d) _snprintf_s((a), (b), _TRUNCATE, (c), (d))
|
||||||
|
|
||||||
|
/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
|
||||||
|
#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
|
||||||
|
if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
|
||||||
|
return; \
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A helper function to dynamic load a 'symbol' from a given library-handle.
|
||||||
|
*
|
||||||
|
* @param env jni handle to report contingencies.
|
||||||
|
* @param handle handle to the dynamic library.
|
||||||
|
* @param symbol symbol to load.
|
||||||
|
* @return returns the address where the symbol is loaded in memory,
|
||||||
|
* <code>NULL</code> on error.
|
||||||
|
*/
|
||||||
|
static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
FARPROC func_ptr = NULL;
|
||||||
|
|
||||||
|
if (!env || !handle || !symbol) {
|
||||||
|
THROW(env, "java/lang/InternalError", NULL);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
func_ptr = GetProcAddress(handle, symbol);
|
||||||
|
if (func_ptr == NULL)
|
||||||
|
{
|
||||||
|
THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
|
||||||
|
}
|
||||||
|
return func_ptr;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
// Windows part end
|
||||||
|
|
||||||
|
|
||||||
#define LOCK_CLASS(env, clazz, classname) \
|
#define LOCK_CLASS(env, clazz, classname) \
|
||||||
if ((*env)->MonitorEnter(env, clazz) != 0) { \
|
if ((*env)->MonitorEnter(env, clazz) != 0) { \
|
||||||
|
|
|
@ -16,6 +16,8 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
#include "org_apache_hadoop.h"
|
||||||
|
|
||||||
#include "bulk_crc32.h"
|
#include "bulk_crc32.h"
|
||||||
|
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
|
|
|
@ -0,0 +1,893 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
#include <errno.h>
|
||||||
|
|
||||||
|
enum CHMOD_WHO
|
||||||
|
{
|
||||||
|
CHMOD_WHO_NONE = 0,
|
||||||
|
CHMOD_WHO_OTHER = 07,
|
||||||
|
CHMOD_WHO_GROUP = 070,
|
||||||
|
CHMOD_WHO_USER = 0700,
|
||||||
|
CHMOD_WHO_ALL = CHMOD_WHO_OTHER | CHMOD_WHO_GROUP | CHMOD_WHO_USER
|
||||||
|
};
|
||||||
|
|
||||||
|
enum CHMOD_OP
|
||||||
|
{
|
||||||
|
CHMOD_OP_INVALID,
|
||||||
|
CHMOD_OP_PLUS,
|
||||||
|
CHMOD_OP_MINUS,
|
||||||
|
CHMOD_OP_EQUAL,
|
||||||
|
};
|
||||||
|
|
||||||
|
enum CHMOD_PERM
|
||||||
|
{
|
||||||
|
CHMOD_PERM_NA = 00,
|
||||||
|
CHMOD_PERM_R = 01,
|
||||||
|
CHMOD_PERM_W = 02,
|
||||||
|
CHMOD_PERM_X = 04,
|
||||||
|
CHMOD_PERM_LX = 010,
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* We use the following struct to build a linked list of mode change actions.
|
||||||
|
* The mode is described by the following grammar:
|
||||||
|
* mode ::= clause [, clause ...]
|
||||||
|
* clause ::= [who ...] [action ...]
|
||||||
|
* action ::= op [perm ...] | op [ref]
|
||||||
|
* who ::= a | u | g | o
|
||||||
|
* op ::= + | - | =
|
||||||
|
* perm ::= r | w | x | X
|
||||||
|
* ref ::= u | g | o
|
||||||
|
*/
|
||||||
|
typedef struct _MODE_CHANGE_ACTION
|
||||||
|
{
|
||||||
|
USHORT who;
|
||||||
|
USHORT op;
|
||||||
|
USHORT perm;
|
||||||
|
USHORT ref;
|
||||||
|
struct _MODE_CHANGE_ACTION *next_action;
|
||||||
|
} MODE_CHANGE_ACTION, *PMODE_CHANGE_ACTION;
|
||||||
|
|
||||||
|
const MODE_CHANGE_ACTION INIT_MODE_CHANGE_ACTION = {
|
||||||
|
CHMOD_WHO_NONE, CHMOD_OP_INVALID, CHMOD_PERM_NA, CHMOD_WHO_NONE, NULL
|
||||||
|
};
|
||||||
|
|
||||||
|
static BOOL ParseOctalMode(LPCWSTR tsMask, INT *uMask);
|
||||||
|
|
||||||
|
static BOOL ParseMode(LPCWSTR modeString, PMODE_CHANGE_ACTION *actions);
|
||||||
|
|
||||||
|
static BOOL FreeActions(PMODE_CHANGE_ACTION actions);
|
||||||
|
|
||||||
|
static BOOL ParseCommandLineArguments(__in int argc, __in wchar_t *argv[],
|
||||||
|
__out BOOL *rec, __out_opt INT *mask,
|
||||||
|
__out_opt PMODE_CHANGE_ACTION *actions, __out LPCWSTR *path);
|
||||||
|
|
||||||
|
static BOOL ChangeFileModeByActions(__in LPCWSTR path,
|
||||||
|
PMODE_CHANGE_ACTION actions);
|
||||||
|
|
||||||
|
static BOOL ChangeFileMode(__in LPCWSTR path, __in_opt INT mode,
|
||||||
|
__in_opt PMODE_CHANGE_ACTION actions);
|
||||||
|
|
||||||
|
static BOOL ChangeFileModeRecursively(__in LPCWSTR path, __in_opt INT mode,
|
||||||
|
__in_opt PMODE_CHANGE_ACTION actions);
|
||||||
|
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: Chmod
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// The main method for chmod command
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// 0: on success
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
//
|
||||||
|
int Chmod(int argc, wchar_t *argv[])
|
||||||
|
{
|
||||||
|
LPWSTR pathName = NULL;
|
||||||
|
LPWSTR longPathName = NULL;
|
||||||
|
|
||||||
|
BOOL recursive = FALSE;
|
||||||
|
|
||||||
|
PMODE_CHANGE_ACTION actions = NULL;
|
||||||
|
|
||||||
|
INT unixAccessMask = 0;
|
||||||
|
|
||||||
|
DWORD dwRtnCode = 0;
|
||||||
|
|
||||||
|
int ret = EXIT_FAILURE;
|
||||||
|
|
||||||
|
// Parsing chmod arguments
|
||||||
|
//
|
||||||
|
if (!ParseCommandLineArguments(argc, argv,
|
||||||
|
&recursive, &unixAccessMask, &actions, &pathName))
|
||||||
|
{
|
||||||
|
fwprintf(stderr, L"Incorrect command line arguments.\n\n");
|
||||||
|
ChmodUsage(argv[0]);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the path the the long path
|
||||||
|
//
|
||||||
|
dwRtnCode = ConvertToLongPath(pathName, &longPathName);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"ConvertToLongPath", dwRtnCode);
|
||||||
|
goto ChmodEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!recursive)
|
||||||
|
{
|
||||||
|
if (ChangeFileMode(longPathName, unixAccessMask, actions))
|
||||||
|
{
|
||||||
|
ret = EXIT_SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (ChangeFileModeRecursively(longPathName, unixAccessMask, actions))
|
||||||
|
{
|
||||||
|
ret = EXIT_SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ChmodEnd:
|
||||||
|
FreeActions(actions);
|
||||||
|
LocalFree(longPathName);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ChangeFileMode
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Wrapper function for change file mode. Choose either change by action or by
|
||||||
|
// access mask.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
//
|
||||||
|
static BOOL ChangeFileMode(__in LPCWSTR path, __in_opt INT unixAccessMask,
|
||||||
|
__in_opt PMODE_CHANGE_ACTION actions)
|
||||||
|
{
|
||||||
|
if (actions != NULL)
|
||||||
|
return ChangeFileModeByActions(path, actions);
|
||||||
|
else
|
||||||
|
{
|
||||||
|
DWORD dwRtnCode = ChangeFileModeByMask(path, unixAccessMask);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"ChangeFileModeByMask", dwRtnCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ChangeFileModeRecursively
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Travel the directory recursively to change the permissions.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// The recursion works in the following way:
|
||||||
|
// - If the path is not a directory, change its mode and return.
|
||||||
|
// Symbolic links and junction points are not considered as directories.
|
||||||
|
// - Otherwise, call the method on all its children, then change its mode.
|
||||||
|
//
|
||||||
|
static BOOL ChangeFileModeRecursively(__in LPCWSTR path, __in_opt INT mode,
|
||||||
|
__in_opt PMODE_CHANGE_ACTION actions)
|
||||||
|
{
|
||||||
|
BOOL isDir = FALSE;
|
||||||
|
BOOL isSymlink = FALSE;
|
||||||
|
LPWSTR dir = NULL;
|
||||||
|
|
||||||
|
size_t pathSize = 0;
|
||||||
|
size_t dirSize = 0;
|
||||||
|
|
||||||
|
HANDLE hFind = INVALID_HANDLE_VALUE;
|
||||||
|
WIN32_FIND_DATA ffd;
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
BOOL ret = FALSE;
|
||||||
|
|
||||||
|
if ((dwRtnCode = DirectoryCheck(path, &isDir)) != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"IsDirectory", dwRtnCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
if ((dwRtnCode = SymbolicLinkCheck(path, &isSymlink)) != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"IsSymbolicLink", dwRtnCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isSymlink || !isDir)
|
||||||
|
{
|
||||||
|
if (ChangeFileMode(path, mode, actions))
|
||||||
|
return TRUE;
|
||||||
|
else
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (FAILED(StringCchLengthW(path, STRSAFE_MAX_CCH - 3, &pathSize)))
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
dirSize = pathSize + 3;
|
||||||
|
dir = (LPWSTR)LocalAlloc(LPTR, dirSize * sizeof(WCHAR));
|
||||||
|
if (dir == NULL)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"LocalAlloc", GetLastError());
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (FAILED(StringCchCopyW(dir, dirSize, path)) ||
|
||||||
|
FAILED(StringCchCatW(dir, dirSize, L"\\*")))
|
||||||
|
{
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
hFind = FindFirstFile(dir, &ffd);
|
||||||
|
if (hFind == INVALID_HANDLE_VALUE)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"FindFirstFile", GetLastError());
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
do
|
||||||
|
{
|
||||||
|
LPWSTR filename = NULL;
|
||||||
|
LPWSTR longFilename = NULL;
|
||||||
|
size_t filenameSize = 0;
|
||||||
|
|
||||||
|
if (wcscmp(ffd.cFileName, L".") == 0 ||
|
||||||
|
wcscmp(ffd.cFileName, L"..") == 0)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
filenameSize = pathSize + wcslen(ffd.cFileName) + 2;
|
||||||
|
filename = (LPWSTR)LocalAlloc(LPTR, filenameSize * sizeof(WCHAR));
|
||||||
|
if (filename == NULL)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"LocalAlloc", GetLastError());
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (FAILED(StringCchCopyW(filename, filenameSize, path)) ||
|
||||||
|
FAILED(StringCchCatW(filename, filenameSize, L"\\")) ||
|
||||||
|
FAILED(StringCchCatW(filename, filenameSize, ffd.cFileName)))
|
||||||
|
{
|
||||||
|
LocalFree(filename);
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
// The child fileanme is not prepended with long path prefix.
|
||||||
|
// Convert the filename to long path format.
|
||||||
|
//
|
||||||
|
dwRtnCode = ConvertToLongPath(filename, &longFilename);
|
||||||
|
LocalFree(filename);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"ConvertToLongPath", dwRtnCode);
|
||||||
|
LocalFree(longFilename);
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(!ChangeFileModeRecursively(longFilename, mode, actions))
|
||||||
|
{
|
||||||
|
LocalFree(longFilename);
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
LocalFree(longFilename);
|
||||||
|
|
||||||
|
} while (FindNextFileW(hFind, &ffd));
|
||||||
|
|
||||||
|
if (!ChangeFileMode(path, mode, actions))
|
||||||
|
{
|
||||||
|
goto ChangeFileModeRecursivelyEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = TRUE;
|
||||||
|
|
||||||
|
ChangeFileModeRecursivelyEnd:
|
||||||
|
LocalFree(dir);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseCommandLineArguments
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Parse command line arguments for chmod.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// 1. Recursive is only set on directories
|
||||||
|
// 2. 'actions' is NULL if the mode is octal
|
||||||
|
//
|
||||||
|
static BOOL ParseCommandLineArguments(__in int argc, __in wchar_t *argv[],
|
||||||
|
__out BOOL *rec,
|
||||||
|
__out_opt INT *mask,
|
||||||
|
__out_opt PMODE_CHANGE_ACTION *actions,
|
||||||
|
__out LPCWSTR *path)
|
||||||
|
{
|
||||||
|
LPCWSTR maskString;
|
||||||
|
BY_HANDLE_FILE_INFORMATION fileInfo;
|
||||||
|
DWORD dwRtnCode = ERROR_SUCCESS;
|
||||||
|
|
||||||
|
assert(path != NULL);
|
||||||
|
|
||||||
|
if (argc != 3 && argc != 4)
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
*rec = FALSE;
|
||||||
|
if (argc == 4)
|
||||||
|
{
|
||||||
|
maskString = argv[2];
|
||||||
|
*path = argv[3];
|
||||||
|
|
||||||
|
if (wcscmp(argv[1], L"-R") == 0)
|
||||||
|
{
|
||||||
|
// Check if the given path name is a file or directory
|
||||||
|
// Only set recursive flag if the given path is a directory
|
||||||
|
//
|
||||||
|
dwRtnCode = GetFileInformationByName(*path, FALSE, &fileInfo);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"GetFileInformationByName", dwRtnCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (IsDirFileInfo(&fileInfo))
|
||||||
|
{
|
||||||
|
*rec = TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
maskString = argv[1];
|
||||||
|
*path = argv[2];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ParseOctalMode(maskString, mask))
|
||||||
|
{
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
else if (ParseMode(maskString, actions))
|
||||||
|
{
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: FreeActions
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Free a linked list of mode change actions given the head node.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// none
|
||||||
|
//
|
||||||
|
static BOOL FreeActions(PMODE_CHANGE_ACTION actions)
|
||||||
|
{
|
||||||
|
PMODE_CHANGE_ACTION curr = NULL;
|
||||||
|
PMODE_CHANGE_ACTION next = NULL;
|
||||||
|
|
||||||
|
// Nothing to free if NULL is passed in
|
||||||
|
//
|
||||||
|
if (actions == NULL)
|
||||||
|
{
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
curr = actions;
|
||||||
|
while (curr != NULL)
|
||||||
|
{
|
||||||
|
next = curr->next_action;
|
||||||
|
LocalFree(curr);
|
||||||
|
curr = next;
|
||||||
|
}
|
||||||
|
actions = NULL;
|
||||||
|
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ComputeNewMode
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Compute a new mode based on the old mode and a mode change action.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// The newly computed mode
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// Apply 'rwx' permission mask or reference permission mode according to the
|
||||||
|
// '+', '-', or '=' operator.
|
||||||
|
//
|
||||||
|
static INT ComputeNewMode(__in INT oldMode,
|
||||||
|
__in USHORT who, __in USHORT op,
|
||||||
|
__in USHORT perm, __in USHORT ref)
|
||||||
|
{
|
||||||
|
static const INT readMask = 0444;
|
||||||
|
static const INT writeMask = 0222;
|
||||||
|
static const INT exeMask = 0111;
|
||||||
|
|
||||||
|
INT mask = 0;
|
||||||
|
INT mode = 0;
|
||||||
|
|
||||||
|
// Operations are exclusive, and cannot be invalid
|
||||||
|
//
|
||||||
|
assert(op == CHMOD_OP_EQUAL || op == CHMOD_OP_PLUS || op == CHMOD_OP_MINUS);
|
||||||
|
|
||||||
|
// Nothing needs to be changed if there is not permission or reference
|
||||||
|
//
|
||||||
|
if(perm == CHMOD_PERM_NA && ref == CHMOD_WHO_NONE)
|
||||||
|
{
|
||||||
|
return oldMode;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We should have only permissions or a reference target, not both.
|
||||||
|
//
|
||||||
|
assert((perm != CHMOD_PERM_NA && ref == CHMOD_WHO_NONE) ||
|
||||||
|
(perm == CHMOD_PERM_NA && ref != CHMOD_WHO_NONE));
|
||||||
|
|
||||||
|
if (perm != CHMOD_PERM_NA)
|
||||||
|
{
|
||||||
|
if ((perm & CHMOD_PERM_R) == CHMOD_PERM_R)
|
||||||
|
mask |= readMask;
|
||||||
|
if ((perm & CHMOD_PERM_W) == CHMOD_PERM_W)
|
||||||
|
mask |= writeMask;
|
||||||
|
if ((perm & CHMOD_PERM_X) == CHMOD_PERM_X)
|
||||||
|
mask |= exeMask;
|
||||||
|
if (((perm & CHMOD_PERM_LX) == CHMOD_PERM_LX))
|
||||||
|
{
|
||||||
|
// It applies execute permissions to directories regardless of their
|
||||||
|
// current permissions and applies execute permissions to a file which
|
||||||
|
// already has at least 1 execute permission bit already set (either user,
|
||||||
|
// group or other). It is only really useful when used with '+' and
|
||||||
|
// usually in combination with the -R option for giving group or other
|
||||||
|
// access to a big directory tree without setting execute permission on
|
||||||
|
// normal files (such as text files), which would normally happen if you
|
||||||
|
// just used "chmod -R a+rx .", whereas with 'X' you can do
|
||||||
|
// "chmod -R a+rX ." instead (Source: Wikipedia)
|
||||||
|
//
|
||||||
|
if ((oldMode & UX_DIRECTORY) == UX_DIRECTORY || (oldMode & exeMask))
|
||||||
|
mask |= exeMask;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (ref != CHMOD_WHO_NONE)
|
||||||
|
{
|
||||||
|
mask |= oldMode & ref;
|
||||||
|
switch(ref)
|
||||||
|
{
|
||||||
|
case CHMOD_WHO_GROUP:
|
||||||
|
mask |= mask >> 3;
|
||||||
|
mask |= mask << 3;
|
||||||
|
break;
|
||||||
|
case CHMOD_WHO_OTHER:
|
||||||
|
mask |= mask << 3;
|
||||||
|
mask |= mask << 6;
|
||||||
|
break;
|
||||||
|
case CHMOD_WHO_USER:
|
||||||
|
mask |= mask >> 3;
|
||||||
|
mask |= mask >> 6;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
// Reference modes can only be U/G/O and are exclusive
|
||||||
|
assert(FALSE);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mask &= who;
|
||||||
|
|
||||||
|
if (op == CHMOD_OP_EQUAL)
|
||||||
|
{
|
||||||
|
mode = (oldMode & (~who)) | mask;
|
||||||
|
}
|
||||||
|
else if (op == CHMOD_OP_MINUS)
|
||||||
|
{
|
||||||
|
mode = oldMode & (~mask);
|
||||||
|
}
|
||||||
|
else if (op == CHMOD_OP_PLUS)
|
||||||
|
{
|
||||||
|
mode = oldMode | mask;
|
||||||
|
}
|
||||||
|
|
||||||
|
return mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ConvertActionsToMask
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Convert a linked list of mode change actions to the Unix permission mask
|
||||||
|
// given the head node.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// none
|
||||||
|
//
|
||||||
|
static BOOL ConvertActionsToMask(__in LPCWSTR path,
|
||||||
|
__in PMODE_CHANGE_ACTION actions, __out PINT puMask)
|
||||||
|
{
|
||||||
|
PMODE_CHANGE_ACTION curr = NULL;
|
||||||
|
|
||||||
|
BY_HANDLE_FILE_INFORMATION fileInformation;
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
|
||||||
|
INT mode = 0;
|
||||||
|
|
||||||
|
dwErrorCode = GetFileInformationByName(path, FALSE, &fileInformation);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"GetFileInformationByName", dwErrorCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
if (IsDirFileInfo(&fileInformation))
|
||||||
|
{
|
||||||
|
mode |= UX_DIRECTORY;
|
||||||
|
}
|
||||||
|
dwErrorCode = FindFileOwnerAndPermission(path, NULL, NULL, &mode);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"FindFileOwnerAndPermission", dwErrorCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
*puMask = mode;
|
||||||
|
|
||||||
|
// Nothing to change if NULL is passed in
|
||||||
|
//
|
||||||
|
if (actions == NULL)
|
||||||
|
{
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (curr = actions; curr != NULL; curr = curr->next_action)
|
||||||
|
{
|
||||||
|
mode = ComputeNewMode(mode, curr->who, curr->op, curr->perm, curr->ref);
|
||||||
|
}
|
||||||
|
|
||||||
|
*puMask = mode;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ChangeFileModeByActions
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Change a file mode through a list of actions.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// none
|
||||||
|
//
|
||||||
|
static BOOL ChangeFileModeByActions(__in LPCWSTR path,
|
||||||
|
PMODE_CHANGE_ACTION actions)
|
||||||
|
{
|
||||||
|
INT mask = 0;
|
||||||
|
|
||||||
|
if (ConvertActionsToMask(path, actions, &mask))
|
||||||
|
{
|
||||||
|
DWORD dwRtnCode = ChangeFileModeByMask(path, mask);
|
||||||
|
if (dwRtnCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"ChangeFileModeByMask", dwRtnCode);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseMode
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Convert a mode string into a linked list of actions
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// Take a state machine approach to parse the mode. Each mode change action
|
||||||
|
// will be a node in the output linked list. The state machine has five state,
|
||||||
|
// and each will only transit to the next; the end state can transit back to
|
||||||
|
// the first state, and thus form a circle. In each state, if we see a
|
||||||
|
// a character not belongs to the state, we will move to next state. WHO, PERM,
|
||||||
|
// and REF states are optional; OP and END states are required; and errors
|
||||||
|
// will only be reported at the latter two states.
|
||||||
|
//
|
||||||
|
static BOOL ParseMode(LPCWSTR modeString, PMODE_CHANGE_ACTION *pActions)
|
||||||
|
{
|
||||||
|
enum __PARSE_MODE_ACTION_STATE
|
||||||
|
{
|
||||||
|
PARSE_MODE_ACTION_WHO_STATE,
|
||||||
|
PARSE_MODE_ACTION_OP_STATE,
|
||||||
|
PARSE_MODE_ACTION_PERM_STATE,
|
||||||
|
PARSE_MODE_ACTION_REF_STATE,
|
||||||
|
PARSE_MODE_ACTION_END_STATE
|
||||||
|
} state = PARSE_MODE_ACTION_WHO_STATE;
|
||||||
|
|
||||||
|
MODE_CHANGE_ACTION action = INIT_MODE_CHANGE_ACTION;
|
||||||
|
PMODE_CHANGE_ACTION actionsEnd = NULL;
|
||||||
|
PMODE_CHANGE_ACTION actionsLast = NULL;
|
||||||
|
USHORT lastWho;
|
||||||
|
WCHAR c = 0;
|
||||||
|
size_t len = 0;
|
||||||
|
size_t i = 0;
|
||||||
|
|
||||||
|
assert(modeString != NULL && pActions != NULL);
|
||||||
|
|
||||||
|
if (FAILED(StringCchLengthW(modeString, STRSAFE_MAX_CCH, &len)))
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
actionsEnd = *pActions;
|
||||||
|
while(i <= len)
|
||||||
|
{
|
||||||
|
c = modeString[i];
|
||||||
|
if (state == PARSE_MODE_ACTION_WHO_STATE)
|
||||||
|
{
|
||||||
|
switch (c)
|
||||||
|
{
|
||||||
|
case L'a':
|
||||||
|
action.who |= CHMOD_WHO_ALL;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'u':
|
||||||
|
action.who |= CHMOD_WHO_USER;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'g':
|
||||||
|
action.who |= CHMOD_WHO_GROUP;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'o':
|
||||||
|
action.who |= CHMOD_WHO_OTHER;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
state = PARSE_MODE_ACTION_OP_STATE;
|
||||||
|
} // WHO switch
|
||||||
|
}
|
||||||
|
else if (state == PARSE_MODE_ACTION_OP_STATE)
|
||||||
|
{
|
||||||
|
switch (c)
|
||||||
|
{
|
||||||
|
case L'+':
|
||||||
|
action.op = CHMOD_OP_PLUS;
|
||||||
|
break;
|
||||||
|
case L'-':
|
||||||
|
action.op = CHMOD_OP_MINUS;
|
||||||
|
break;
|
||||||
|
case L'=':
|
||||||
|
action.op = CHMOD_OP_EQUAL;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
fwprintf(stderr, L"Invalid mode: '%s'\n", modeString);
|
||||||
|
FreeActions(*pActions);
|
||||||
|
return FALSE;
|
||||||
|
} // OP switch
|
||||||
|
i++;
|
||||||
|
state = PARSE_MODE_ACTION_PERM_STATE;
|
||||||
|
}
|
||||||
|
else if (state == PARSE_MODE_ACTION_PERM_STATE)
|
||||||
|
{
|
||||||
|
switch (c)
|
||||||
|
{
|
||||||
|
case L'r':
|
||||||
|
action.perm |= CHMOD_PERM_R;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'w':
|
||||||
|
action.perm |= CHMOD_PERM_W;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'x':
|
||||||
|
action.perm |= CHMOD_PERM_X;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'X':
|
||||||
|
action.perm |= CHMOD_PERM_LX;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
state = PARSE_MODE_ACTION_REF_STATE;
|
||||||
|
} // PERM switch
|
||||||
|
}
|
||||||
|
else if (state == PARSE_MODE_ACTION_REF_STATE)
|
||||||
|
{
|
||||||
|
switch (c)
|
||||||
|
{
|
||||||
|
case L'u':
|
||||||
|
action.ref = CHMOD_WHO_USER;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'g':
|
||||||
|
action.ref = CHMOD_WHO_GROUP;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
case L'o':
|
||||||
|
action.ref = CHMOD_WHO_OTHER;
|
||||||
|
i++;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
state = PARSE_MODE_ACTION_END_STATE;
|
||||||
|
} // REF switch
|
||||||
|
}
|
||||||
|
else if (state == PARSE_MODE_ACTION_END_STATE)
|
||||||
|
{
|
||||||
|
switch (c)
|
||||||
|
{
|
||||||
|
case NULL:
|
||||||
|
case L',':
|
||||||
|
i++;
|
||||||
|
case L'+':
|
||||||
|
case L'-':
|
||||||
|
case L'=':
|
||||||
|
state = PARSE_MODE_ACTION_WHO_STATE;
|
||||||
|
|
||||||
|
// Append the current action to the end of the linked list
|
||||||
|
//
|
||||||
|
assert(actionsEnd == NULL);
|
||||||
|
// Allocate memory
|
||||||
|
actionsEnd = (PMODE_CHANGE_ACTION) LocalAlloc(LPTR,
|
||||||
|
sizeof(MODE_CHANGE_ACTION));
|
||||||
|
if (actionsEnd == NULL)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"LocalAlloc", GetLastError());
|
||||||
|
FreeActions(*pActions);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
if (action.who == CHMOD_WHO_NONE) action.who = CHMOD_WHO_ALL;
|
||||||
|
// Copy the action to the new node
|
||||||
|
*actionsEnd = action;
|
||||||
|
// Append to the last node in the linked list
|
||||||
|
if (actionsLast != NULL) actionsLast->next_action = actionsEnd;
|
||||||
|
// pActions should point to the head of the linked list
|
||||||
|
if (*pActions == NULL) *pActions = actionsEnd;
|
||||||
|
// Update the two pointers to point to the last node and the tail
|
||||||
|
actionsLast = actionsEnd;
|
||||||
|
actionsEnd = actionsLast->next_action;
|
||||||
|
|
||||||
|
// Reset action
|
||||||
|
//
|
||||||
|
lastWho = action.who;
|
||||||
|
action = INIT_MODE_CHANGE_ACTION;
|
||||||
|
if (c != L',')
|
||||||
|
{
|
||||||
|
action.who = lastWho;
|
||||||
|
}
|
||||||
|
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
fwprintf(stderr, L"Invalid mode: '%s'\n", modeString);
|
||||||
|
FreeActions(*pActions);
|
||||||
|
return FALSE;
|
||||||
|
} // END switch
|
||||||
|
}
|
||||||
|
} // while
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseOctalMode
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Convert the 3 or 4 digits Unix mask string into the binary representation
|
||||||
|
// of the Unix access mask, i.e. 9 bits each an indicator of the permission
|
||||||
|
// of 'rwxrwxrwx', i.e. user's, group's, and owner's read, write, and
|
||||||
|
// execute/search permissions.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
// FALSE: otherwise
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// none
|
||||||
|
//
|
||||||
|
static BOOL ParseOctalMode(LPCWSTR tsMask, INT *uMask)
|
||||||
|
{
|
||||||
|
size_t tsMaskLen = 0;
|
||||||
|
DWORD i;
|
||||||
|
LONG l;
|
||||||
|
WCHAR *end;
|
||||||
|
|
||||||
|
if (uMask == NULL)
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
if (FAILED(StringCchLengthW(tsMask, STRSAFE_MAX_CCH, &tsMaskLen)))
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
if (tsMaskLen == 0 || tsMaskLen > 4)
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < tsMaskLen; i++)
|
||||||
|
{
|
||||||
|
if (!(tsMask[tsMaskLen - i - 1] >= L'0' &&
|
||||||
|
tsMask[tsMaskLen - i - 1] <= L'7'))
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
errno = 0;
|
||||||
|
if (tsMaskLen == 4)
|
||||||
|
// Windows does not have any equivalent of setuid/setgid and sticky bit.
|
||||||
|
// So the first bit is omitted for the 4 digit octal mode case.
|
||||||
|
//
|
||||||
|
l = wcstol(tsMask + 1, &end, 8);
|
||||||
|
else
|
||||||
|
l = wcstol(tsMask, &end, 8);
|
||||||
|
|
||||||
|
if (errno || l > 0x0777 || l < 0 || *end != 0)
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
*uMask = (INT) l;
|
||||||
|
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: ChmodUsage
//
// Description:
//  Print the usage/help text for the chmod command to standard output.
//  'program' is the invocation name substituted into both synopsis lines.
//
void ChmodUsage(LPCWSTR program)
{
  fwprintf(stdout, L"\
Usage: %s [OPTION] OCTAL-MODE [FILE]\n\
or: %s [OPTION] MODE [FILE]\n\
Change the mode of the FILE to MODE.\n\
\n\
-R: change files and directories recursively\n\
\n\
Each MODE is of the form '[ugoa]*([-+=]([rwxX]*|[ugo]))+'.\n",
program, program);
}
|
|
@ -0,0 +1,270 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: ChangeFileOwnerBySid
//
// Description:
//  Change a file or directory ownership by giving new owner and group SIDs
//
// Returns:
//  ERROR_SUCCESS: on success
//  Error code: otherwise
//
// Notes:
//  This function is long path safe, i.e. the path will be converted to long
//  path format if not already converted. So the caller does not need to do
//  the conversion before calling the method.
//
static DWORD ChangeFileOwnerBySid(__in LPCWSTR path,
  __in_opt PSID pNewOwnerSid, __in_opt PSID pNewGroupSid)
{
  LPWSTR longPathName = NULL;
  INT oldMode = 0;

  SECURITY_INFORMATION securityInformation = 0;

  DWORD dwRtnCode = ERROR_SUCCESS;

  // Convert the path to the long path form
  //
  dwRtnCode = ConvertToLongPath(path, &longPathName);
  if (dwRtnCode != ERROR_SUCCESS)
  {
    goto ChangeFileOwnerByNameEnd;
  }

  // Capture the current permission mask before the ownership change so it
  // can be re-applied for the new owner afterwards.
  //
  dwRtnCode = FindFileOwnerAndPermission(longPathName, NULL, NULL, &oldMode);
  if (dwRtnCode != ERROR_SUCCESS)
  {
    goto ChangeFileOwnerByNameEnd;
  }

  // We need SeTakeOwnershipPrivilege to set the owner if the caller does not
  // have WRITE_OWNER access to the object; we need SeRestorePrivilege if the
  // SID is not contained in the caller's token, and have the SE_GROUP_OWNER
  // permission enabled. Failure to enable either privilege is informational
  // only: SetNamedSecurityInfoW below will report the authoritative error.
  //
  if (!EnablePrivilege(L"SeTakeOwnershipPrivilege"))
  {
    fwprintf(stdout, L"INFO: The user does not have SeTakeOwnershipPrivilege.\n");
  }
  if (!EnablePrivilege(L"SeRestorePrivilege"))
  {
    fwprintf(stdout, L"INFO: The user does not have SeRestorePrivilege.\n");
  }

  assert(pNewOwnerSid != NULL || pNewGroupSid != NULL);

  // Set the owners of the file. Only the security information actually
  // provided by the caller is requested to change.
  //
  if (pNewOwnerSid != NULL) securityInformation |= OWNER_SECURITY_INFORMATION;
  if (pNewGroupSid != NULL) securityInformation |= GROUP_SECURITY_INFORMATION;
  dwRtnCode = SetNamedSecurityInfoW(
    longPathName,
    SE_FILE_OBJECT,
    securityInformation,
    pNewOwnerSid,
    pNewGroupSid,
    NULL,
    NULL);
  if (dwRtnCode != ERROR_SUCCESS)
  {
    goto ChangeFileOwnerByNameEnd;
  }

  // Set the permission on the file for the new owner.
  //
  dwRtnCode = ChangeFileModeByMask(longPathName, oldMode);
  if (dwRtnCode != ERROR_SUCCESS)
  {
    goto ChangeFileOwnerByNameEnd;
  }

ChangeFileOwnerByNameEnd:
  LocalFree(longPathName);
  return dwRtnCode;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: Chown
//
// Description:
//  The main method for chown command. Parses "[OWNER][:[GROUP]] FILE" from
//  argv, resolves the account names to SIDs, and applies the ownership
//  change.
//
// Returns:
//  EXIT_SUCCESS: on success
//  EXIT_FAILURE: otherwise
//
// Notes:
//  none
//
int Chown(int argc, wchar_t *argv[])
{
  LPWSTR pathName = NULL;

  LPWSTR ownerInfo = NULL;

  LPWSTR colonPos = NULL;

  LPWSTR userName = NULL;
  size_t userNameLen = 0;

  LPWSTR groupName = NULL;
  size_t groupNameLen = 0;

  PSID pNewOwnerSid = NULL;
  PSID pNewGroupSid = NULL;

  DWORD dwRtnCode = 0;

  int ret = EXIT_FAILURE;

  if (argc >= 3)
  {
    ownerInfo = argv[1];
    pathName = argv[2];
  }
  else
  {
    fwprintf(stderr, L"Incorrect command line arguments.\n\n");
    ChownUsage(argv[0]);
    return ret;
  }

  // Parsing the owner name: an optional ':' splits "[OWNER][:[GROUP]]"
  // into a user part and a group part; either may be absent.
  //
  if ((colonPos = wcschr(ownerInfo, L':')) != NULL)
  {
    if (colonPos - ownerInfo != 0)
    {
      // Length includes NULL terminator
      userNameLen = colonPos - ownerInfo + 1;
      userName = (LPTSTR)LocalAlloc(LPTR, userNameLen * sizeof(WCHAR));
      if (userName == NULL)
      {
        ReportErrorCode(L"LocalAlloc", GetLastError());
        goto ChownEnd;
      }
      if (FAILED(StringCchCopyNW(userName, userNameLen,
        ownerInfo, userNameLen - 1)))
        goto ChownEnd;
    }

    if (*(colonPos + 1) != 0)
    {
      // Length includes NULL terminator
      groupNameLen = wcslen(ownerInfo) - (colonPos - ownerInfo) + 1;
      groupName = (LPTSTR)LocalAlloc(LPTR, groupNameLen * sizeof(WCHAR));
      if (groupName == NULL)
      {
        ReportErrorCode(L"LocalAlloc", GetLastError());
        goto ChownEnd;
      }
      if (FAILED(StringCchCopyNW(groupName, groupNameLen,
        colonPos + 1, groupNameLen)))
        goto ChownEnd;
    }
  }
  else
  {
    // No colon: the whole argument is the user name.
    // Length includes NULL terminator
    userNameLen = wcslen(ownerInfo) + 1;
    userName = (LPWSTR)LocalAlloc(LPTR, userNameLen * sizeof(WCHAR));
    if (userName == NULL)
    {
      ReportErrorCode(L"LocalAlloc", GetLastError());
      goto ChownEnd;
    }
    if (FAILED(StringCchCopyNW(userName, userNameLen, ownerInfo, userNameLen)))
      goto ChownEnd;
  }

  // Not allow zero length user name or group name in the parsing results.
  //
  assert(userName == NULL || wcslen(userName) > 0);
  assert(groupName == NULL || wcslen(groupName) > 0);

  // Nothing to change if both names are empty
  //
  if ((userName == NULL) && (groupName == NULL))
  {
    ret = EXIT_SUCCESS;
    goto ChownEnd;
  }

  if (userName != NULL)
  {
    dwRtnCode = GetSidFromAcctNameW(userName, &pNewOwnerSid);
    if (dwRtnCode != ERROR_SUCCESS)
    {
      ReportErrorCode(L"GetSidFromAcctName", dwRtnCode);
      fwprintf(stderr, L"Invalid user name: %s\n", userName);
      goto ChownEnd;
    }
  }

  if (groupName != NULL)
  {
    dwRtnCode = GetSidFromAcctNameW(groupName, &pNewGroupSid);
    if (dwRtnCode != ERROR_SUCCESS)
    {
      ReportErrorCode(L"GetSidFromAcctName", dwRtnCode);
      fwprintf(stderr, L"Invalid group name: %s\n", groupName);
      goto ChownEnd;
    }
  }

  // Reject empty paths and paths containing characters invalid in Windows
  // file names.
  if (wcslen(pathName) == 0 || wcsspn(pathName, L"/?|><:*\"") != 0)
  {
    fwprintf(stderr, L"Incorrect file name format: %s\n", pathName);
    goto ChownEnd;
  }

  dwRtnCode = ChangeFileOwnerBySid(pathName, pNewOwnerSid, pNewGroupSid);
  if (dwRtnCode != ERROR_SUCCESS)
  {
    ReportErrorCode(L"ChangeFileOwnerBySid", dwRtnCode);
    goto ChownEnd;
  }

  ret = EXIT_SUCCESS;

ChownEnd:
  // LocalFree tolerates NULL, so unconditionally releasing is safe.
  LocalFree(userName);
  LocalFree(groupName);
  LocalFree(pNewOwnerSid);
  LocalFree(pNewGroupSid);

  return ret;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: ChownUsage
//
// Description:
//  Print the usage/help text for the chown command to standard output.
//  'program' is the invocation name substituted into the synopsis line.
//
void ChownUsage(LPCWSTR program)
{
  fwprintf(stdout, L"\
Usage: %s [OWNER][:[GROUP]] [FILE]\n\
Change the owner and/or group of the FILE to OWNER and/or GROUP.\n\
\n\
Note:\n\
On Linux, if a colon but no group name follows the user name, the group of\n\
the files is changed to that user\'s login group. Windows has no concept of\n\
a user's login group. So we do not change the group owner in this case.\n",
program);
}
|
|
@ -0,0 +1,217 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: PrintGroups
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Print group names to the console standard output for the given user
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// This function could fail on first pass when we fail to find groups for
|
||||||
|
// domain account; so we do not report Windows API errors in this function.
|
||||||
|
// If formatOutput is true, pipe character is used as separator for groups
|
||||||
|
// otherwise, space.
|
||||||
|
//
|
||||||
|
static BOOL PrintGroups(
|
||||||
|
LPLOCALGROUP_USERS_INFO_0 groups,
|
||||||
|
DWORD entries,
|
||||||
|
BOOL formatOutput)
|
||||||
|
{
|
||||||
|
BOOL ret = TRUE;
|
||||||
|
LPLOCALGROUP_USERS_INFO_0 pTmpBuf = groups;
|
||||||
|
DWORD i;
|
||||||
|
|
||||||
|
for (i = 0; i < entries; i++)
|
||||||
|
{
|
||||||
|
if (pTmpBuf == NULL)
|
||||||
|
{
|
||||||
|
ret = FALSE;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (i != 0)
|
||||||
|
{
|
||||||
|
if (formatOutput)
|
||||||
|
{
|
||||||
|
wprintf(L"|");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
wprintf(L" ");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
wprintf(L"%s", pTmpBuf->lgrui0_name);
|
||||||
|
|
||||||
|
pTmpBuf++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ret)
|
||||||
|
wprintf(L"\n");
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseCommandLine
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Parses the command line
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE on the valid command line, FALSE otherwise
|
||||||
|
//
|
||||||
|
static BOOL ParseCommandLine(
|
||||||
|
int argc, wchar_t *argv[], wchar_t **user, BOOL *formatOutput)
|
||||||
|
{
|
||||||
|
*formatOutput = FALSE;
|
||||||
|
|
||||||
|
assert(argv != NULL);
|
||||||
|
assert(user != NULL);
|
||||||
|
|
||||||
|
if (argc == 1)
|
||||||
|
{
|
||||||
|
// implicitly use the current user
|
||||||
|
*user = NULL;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
else if (argc == 2)
|
||||||
|
{
|
||||||
|
// check if the second argument is formating
|
||||||
|
if (wcscmp(argv[1], L"-F") == 0)
|
||||||
|
{
|
||||||
|
*user = NULL;
|
||||||
|
*formatOutput = TRUE;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
*user = argv[1];
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (argc == 3 && wcscmp(argv[1], L"-F") == 0)
|
||||||
|
{
|
||||||
|
// if 3 args, the second argument must be "-F"
|
||||||
|
|
||||||
|
*user = argv[2];
|
||||||
|
*formatOutput = TRUE;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: Groups
//
// Description:
//  The main method for groups command. Prints the local group membership of
//  the named user (or of the current user when none is given).
//
// Returns:
//  EXIT_SUCCESS: on success
//  EXIT_FAILURE: otherwise
//
// Notes:
//  none
//
int Groups(int argc, wchar_t *argv[])
{
  LPWSTR input = NULL;

  LPWSTR currentUser = NULL;
  DWORD cchCurrentUser = 0;

  LPLOCALGROUP_USERS_INFO_0 groups = NULL;
  DWORD entries = 0;

  DWORD dwRtnCode = ERROR_SUCCESS;

  int ret = EXIT_SUCCESS;
  BOOL formatOutput = FALSE;

  if (!ParseCommandLine(argc, argv, &input, &formatOutput))
  {
    fwprintf(stderr, L"Incorrect command line arguments.\n\n");
    GroupsUsage(argv[0]);
    return EXIT_FAILURE;
  }

  // if username was not specified on the command line, fallback to the
  // current user
  if (input == NULL)
  {
    // First call with a NULL buffer: on the expected
    // ERROR_INSUFFICIENT_BUFFER failure cchCurrentUser receives the
    // required size (including the terminator); then allocate and retry.
    GetUserNameW(currentUser, &cchCurrentUser);
    if (GetLastError() == ERROR_INSUFFICIENT_BUFFER)
    {
      currentUser = (LPWSTR) LocalAlloc(LPTR,
        (cchCurrentUser + 1) * sizeof(wchar_t));
      if (!currentUser)
      {
        ReportErrorCode(L"LocalAlloc", GetLastError());
        ret = EXIT_FAILURE;
        goto GroupsEnd;
      }
      if (GetUserNameW(currentUser, &cchCurrentUser))
        input = currentUser;
      else
      {
        ReportErrorCode(L"GetUserName", GetLastError());
        ret = EXIT_FAILURE;
        goto GroupsEnd;
      }
    }
    else
    {
      // Any error other than an insufficient buffer is fatal.
      ReportErrorCode(L"GetUserName", GetLastError());
      ret = EXIT_FAILURE;
      goto GroupsEnd;
    }
  }

  if ((dwRtnCode = GetLocalGroupsForUser(input, &groups, &entries))
      != ERROR_SUCCESS)
  {
    ReportErrorCode(L"GetLocalGroupsForUser", dwRtnCode);
    ret = EXIT_FAILURE;
    goto GroupsEnd;
  }

  if (!PrintGroups(groups, entries, formatOutput))
  {
    ret = EXIT_FAILURE;
  }

GroupsEnd:
  LocalFree(currentUser);
  // The groups buffer was allocated by the NetApi layer and must be
  // released with NetApiBufferFree, not LocalFree.
  if (groups != NULL) NetApiBufferFree(groups);
  return ret;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: GroupsUsage
//
// Description:
//  Print the usage/help text for the groups command to standard output.
//  'program' is the invocation name substituted into the synopsis line.
//
void GroupsUsage(LPCWSTR program)
{
  fwprintf(stdout, L"\
Usage: %s [OPTIONS] [USERNAME]\n\
Print group information of the specified USERNAME \
(the current user by default).\n\
\n\
OPTIONS: -F format the output by separating tokens with a pipe\n",
program);
}
|
|
@ -0,0 +1,230 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
// List of different hardlink related command line options supported by
// winutils.
typedef enum HardLinkCommandOptionType
{
  HardLinkInvalid,  // sentinel: no valid sub-command recognized
  HardLinkCreate,   // "hardlink create [LINKNAME] [FILENAME]"
  HardLinkStat      // "hardlink stat [FILENAME]"
} HardLinkCommandOption;
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseCommandLine
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Parses the given command line. On success, out param 'command' contains
|
||||||
|
// the user specified command.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: If the command line is valid
|
||||||
|
// FALSE: otherwise
|
||||||
|
static BOOL ParseCommandLine(__in int argc,
|
||||||
|
__in wchar_t *argv[],
|
||||||
|
__out HardLinkCommandOption *command)
|
||||||
|
{
|
||||||
|
*command = HardLinkInvalid;
|
||||||
|
|
||||||
|
if (argc != 3 && argc != 4) {
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc == 3) {
|
||||||
|
if (wcscmp(argv[0], L"hardlink") != 0 || wcscmp(argv[1], L"stat") != 0)
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
*command = HardLinkStat;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc == 4) {
|
||||||
|
if (wcscmp(argv[0], L"hardlink") != 0 || wcscmp(argv[1], L"create") != 0)
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
*command = HardLinkCreate;
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(*command != HardLinkInvalid);
|
||||||
|
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: HardlinkStat
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Computes the number of hard links for a given file.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// ERROR_SUCCESS: On success
|
||||||
|
// error code: otherwise
|
||||||
|
static DWORD HardlinkStat(__in LPCWSTR fileName, __out DWORD *puHardLinkCount)
|
||||||
|
{
|
||||||
|
BY_HANDLE_FILE_INFORMATION fileInformation;
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
PWSTR longFileName = NULL;
|
||||||
|
|
||||||
|
// First convert input paths to long paths
|
||||||
|
//
|
||||||
|
dwErrorCode = ConvertToLongPath(fileName, &longFileName);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
goto HardlinkStatExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file information which contains the hard link count
|
||||||
|
//
|
||||||
|
dwErrorCode = GetFileInformationByName(longFileName, FALSE, &fileInformation);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
goto HardlinkStatExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
*puHardLinkCount = fileInformation.nNumberOfLinks;
|
||||||
|
|
||||||
|
HardlinkStatExit:
|
||||||
|
LocalFree(longFileName);
|
||||||
|
|
||||||
|
return dwErrorCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: HardlinkCreate
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Creates a hard link for a given file under the given name.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// ERROR_SUCCESS: On success
|
||||||
|
// error code: otherwise
|
||||||
|
static DWORD HardlinkCreate(__in LPCWSTR linkName, __in LPCWSTR fileName)
|
||||||
|
{
|
||||||
|
PWSTR longLinkName = NULL;
|
||||||
|
PWSTR longFileName = NULL;
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
|
||||||
|
// First convert input paths to long paths
|
||||||
|
//
|
||||||
|
dwErrorCode = ConvertToLongPath(linkName, &longLinkName);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
goto HardlinkCreateExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
dwErrorCode = ConvertToLongPath(fileName, &longFileName);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
goto HardlinkCreateExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the hard link
|
||||||
|
//
|
||||||
|
if (!CreateHardLink(longLinkName, longFileName, NULL))
|
||||||
|
{
|
||||||
|
dwErrorCode = GetLastError();
|
||||||
|
}
|
||||||
|
|
||||||
|
HardlinkCreateExit:
|
||||||
|
LocalFree(longLinkName);
|
||||||
|
LocalFree(longFileName);
|
||||||
|
|
||||||
|
return dwErrorCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: Hardlink
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Creates a hard link for a given file under the given name. Outputs the
|
||||||
|
// appropriate information to stdout on success, or stderr on failure.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// EXIT_SUCCESS: On success
|
||||||
|
// EXIT_FAILURE: otherwise
|
||||||
|
int Hardlink(int argc, wchar_t *argv[])
|
||||||
|
{
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
int ret = EXIT_FAILURE;
|
||||||
|
HardLinkCommandOption command = HardLinkInvalid;
|
||||||
|
|
||||||
|
if (!ParseCommandLine(argc, argv, &command)) {
|
||||||
|
dwErrorCode = ERROR_INVALID_COMMAND_LINE;
|
||||||
|
|
||||||
|
fwprintf(stderr, L"Incorrect command line arguments.\n\n");
|
||||||
|
HardlinkUsage();
|
||||||
|
goto HardLinkExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command == HardLinkStat)
|
||||||
|
{
|
||||||
|
// Compute the number of hard links
|
||||||
|
//
|
||||||
|
DWORD uHardLinkCount = 0;
|
||||||
|
dwErrorCode = HardlinkStat(argv[2], &uHardLinkCount);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"HardlinkStat", dwErrorCode);
|
||||||
|
goto HardLinkExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output the result
|
||||||
|
//
|
||||||
|
fwprintf(stdout, L"%d\n", uHardLinkCount);
|
||||||
|
|
||||||
|
} else if (command == HardLinkCreate)
|
||||||
|
{
|
||||||
|
// Create the hard link
|
||||||
|
//
|
||||||
|
dwErrorCode = HardlinkCreate(argv[2], argv[3]);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"HardlinkCreate", dwErrorCode);
|
||||||
|
goto HardLinkExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output the success message
|
||||||
|
//
|
||||||
|
fwprintf(stdout, L"Hardlink created for %s <<===>> %s\n", argv[2], argv[3]);
|
||||||
|
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
// Should not happen
|
||||||
|
//
|
||||||
|
assert(FALSE);
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = EXIT_SUCCESS;
|
||||||
|
|
||||||
|
HardLinkExit:
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
void HardlinkUsage()
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"\
|
||||||
|
Usage: hardlink create [LINKNAME] [FILENAME] |\n\
|
||||||
|
hardlink stat [FILENAME]\n\
|
||||||
|
Creates a new hardlink on the existing file or displays the number of links\n\
|
||||||
|
for the given file\n");
|
||||||
|
}
|
|
@ -0,0 +1,142 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef UNICODE
|
||||||
|
#define UNICODE
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <assert.h>
|
||||||
|
#include <windows.h>
|
||||||
|
#include <aclapi.h>
|
||||||
|
#include <accctrl.h>
|
||||||
|
#include <tchar.h>
|
||||||
|
#include <strsafe.h>
|
||||||
|
#include <lm.h>
|
||||||
|
|
||||||
|
enum EXIT_CODE
|
||||||
|
{
|
||||||
|
/* Common success exit code shared among all utilities */
|
||||||
|
SUCCESS = EXIT_SUCCESS,
|
||||||
|
/* Generic failure exit code share among all utilities */
|
||||||
|
FAILURE = EXIT_FAILURE,
|
||||||
|
/* Failure code indicates the user does not privilege to create symlinks */
|
||||||
|
SYMLINK_NO_PRIVILEGE = 2,
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The array of 12 months' three-letter abbreviations
|
||||||
|
*/
|
||||||
|
extern const LPCWSTR MONTHS[];
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The Unix masks
|
||||||
|
* The Windows version of <sys/stat.h> does not contain all the POSIX flag/mask
|
||||||
|
* definitions. The following masks are used in 'winutils' to represent POSIX
|
||||||
|
* permission mode.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
enum UnixAclMask
|
||||||
|
{
|
||||||
|
UX_O_EXECUTE = 00001, // S_IXOTH
|
||||||
|
UX_O_WRITE = 00002, // S_IWOTH
|
||||||
|
UX_O_READ = 00004, // S_IROTH
|
||||||
|
UX_G_EXECUTE = 00010, // S_IXGRP
|
||||||
|
UX_G_WRITE = 00020, // S_IWGRP
|
||||||
|
UX_G_READ = 00040, // S_IRGRP
|
||||||
|
UX_U_EXECUTE = 00100, // S_IXUSR
|
||||||
|
UX_U_WRITE = 00200, // S_IWUSR
|
||||||
|
UX_U_READ = 00400, // S_IRUSR
|
||||||
|
UX_DIRECTORY = 0040000, // S_IFDIR
|
||||||
|
UX_SYMLINK = 0120000, // S_IFLNK
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The WindowsAclMask and WinMasks contain the definitions used to establish
|
||||||
|
* the mapping between Unix and Windows.
|
||||||
|
*/
|
||||||
|
enum WindowsAclMask
|
||||||
|
{
|
||||||
|
WIN_READ, // The permission(s) that enable Unix read permission
|
||||||
|
WIN_WRITE, // The permission(s) that enable Unix write permission
|
||||||
|
WIN_EXECUTE, // The permission(s) that enbale Unix execute permission
|
||||||
|
WIN_OWNER_SE, // The permissions that are always set for file owners
|
||||||
|
WIN_ALL, // The permissions that all files on Windows should have
|
||||||
|
WIN_MASKS_TOTAL
|
||||||
|
};
|
||||||
|
extern const ACCESS_MASK WinMasks[];
|
||||||
|
|
||||||
|
|
||||||
|
int Ls(int argc, wchar_t *argv[]);
|
||||||
|
void LsUsage(LPCWSTR program);
|
||||||
|
|
||||||
|
int Chmod(int argc, wchar_t *argv[]);
|
||||||
|
void ChmodUsage(LPCWSTR program);
|
||||||
|
|
||||||
|
int Chown(int argc, wchar_t *argv[]);
|
||||||
|
void ChownUsage(LPCWSTR program);
|
||||||
|
|
||||||
|
int Groups(int argc, wchar_t *argv[]);
|
||||||
|
void GroupsUsage(LPCWSTR program);
|
||||||
|
|
||||||
|
int Hardlink(int argc, wchar_t *argv[]);
|
||||||
|
void HardlinkUsage();
|
||||||
|
|
||||||
|
int Task(int argc, wchar_t *argv[]);
|
||||||
|
void TaskUsage();
|
||||||
|
|
||||||
|
int Symlink(int argc, wchar_t *argv[]);
|
||||||
|
void SymlinkUsage();
|
||||||
|
|
||||||
|
int SystemInfo();
|
||||||
|
void SystemInfoUsage();
|
||||||
|
|
||||||
|
DWORD GetFileInformationByName(__in LPCWSTR pathName, __in BOOL followLink,
|
||||||
|
__out LPBY_HANDLE_FILE_INFORMATION lpFileInformation);
|
||||||
|
|
||||||
|
DWORD ConvertToLongPath(__in PCWSTR path, __deref_out PWSTR *newPath);
|
||||||
|
|
||||||
|
DWORD GetSidFromAcctNameW(LPCWSTR acctName, PSID* ppSid);
|
||||||
|
|
||||||
|
DWORD GetAccntNameFromSid(PSID pSid, LPWSTR *ppAcctName);
|
||||||
|
|
||||||
|
void ReportErrorCode(LPCWSTR func, DWORD err);
|
||||||
|
|
||||||
|
BOOL IsDirFileInfo(const BY_HANDLE_FILE_INFORMATION *fileInformation);
|
||||||
|
|
||||||
|
DWORD FindFileOwnerAndPermission(
|
||||||
|
__in LPCWSTR pathName,
|
||||||
|
__out_opt LPWSTR *pOwnerName,
|
||||||
|
__out_opt LPWSTR *pGroupName,
|
||||||
|
__out_opt PINT pMask);
|
||||||
|
|
||||||
|
DWORD DirectoryCheck(__in LPCWSTR pathName, __out LPBOOL result);
|
||||||
|
|
||||||
|
DWORD SymbolicLinkCheck(__in LPCWSTR pathName, __out LPBOOL result);
|
||||||
|
|
||||||
|
DWORD JunctionPointCheck(__in LPCWSTR pathName, __out LPBOOL result);
|
||||||
|
|
||||||
|
DWORD ChangeFileModeByMask(__in LPCWSTR path, INT mode);
|
||||||
|
|
||||||
|
DWORD GetLocalGroupsForUser(__in LPCWSTR user,
|
||||||
|
__out LPLOCALGROUP_USERS_INFO_0 *groups, __out LPDWORD entries);
|
||||||
|
|
||||||
|
BOOL EnablePrivilege(__in LPCWSTR privilegeName);
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,171 @@
|
||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
contributor license agreements. See the NOTICE file distributed with
|
||||||
|
this work for additional information regarding copyright ownership.
|
||||||
|
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
(the "License"); you may not use this file except in compliance with
|
||||||
|
the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||||
|
<ItemGroup Label="ProjectConfigurations">
|
||||||
|
<ProjectConfiguration Include="Debug|Win32">
|
||||||
|
<Configuration>Debug</Configuration>
|
||||||
|
<Platform>Win32</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Debug|x64">
|
||||||
|
<Configuration>Debug</Configuration>
|
||||||
|
<Platform>x64</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Release|Win32">
|
||||||
|
<Configuration>Release</Configuration>
|
||||||
|
<Platform>Win32</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Release|x64">
|
||||||
|
<Configuration>Release</Configuration>
|
||||||
|
<Platform>x64</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
</ItemGroup>
|
||||||
|
<PropertyGroup Label="Globals">
|
||||||
|
<ProjectGuid>{12131AA7-902E-4a6d-9CE3-043261D22A12}</ProjectGuid>
|
||||||
|
<Keyword>Win32Proj</Keyword>
|
||||||
|
<RootNamespace>winutils</RootNamespace>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||||
|
<ConfigurationType>StaticLibrary</ConfigurationType>
|
||||||
|
<UseDebugLibraries>true</UseDebugLibraries>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||||
|
<ConfigurationType>StaticLibrary</ConfigurationType>
|
||||||
|
<UseDebugLibraries>true</UseDebugLibraries>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||||
|
<ConfigurationType>StaticLibrary</ConfigurationType>
|
||||||
|
<UseDebugLibraries>false</UseDebugLibraries>
|
||||||
|
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||||
|
<ConfigurationType>StaticLibrary</ConfigurationType>
|
||||||
|
<UseDebugLibraries>false</UseDebugLibraries>
|
||||||
|
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||||
|
<ImportGroup Label="ExtensionSettings">
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<PropertyGroup Label="UserMacros" />
|
||||||
|
<PropertyGroup>
|
||||||
|
<IncludePath>include;$(IncludePath)</IncludePath>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<LinkIncremental>true</LinkIncremental>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||||
|
<LinkIncremental>true</LinkIncremental>
|
||||||
|
<OutDir />
|
||||||
|
<IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<LinkIncremental>false</LinkIncremental>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<LinkIncremental>false</LinkIncremental>
|
||||||
|
<OutDir>..\..\..\target\bin\</OutDir>
|
||||||
|
<IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
|
||||||
|
</PropertyGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<ClCompile>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<Optimization>Disabled</Optimization>
|
||||||
|
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||||
|
<ClCompile>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<WarningLevel>Level4</WarningLevel>
|
||||||
|
<Optimization>Disabled</Optimization>
|
||||||
|
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<ClCompile>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<Optimization>MaxSpeed</Optimization>
|
||||||
|
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||||
|
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||||
|
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||||
|
<OptimizeReferences>true</OptimizeReferences>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<ClCompile>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<Optimization>MaxSpeed</Optimization>
|
||||||
|
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||||
|
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||||
|
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||||
|
<OptimizeReferences>true</OptimizeReferences>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClCompile Include="libwinutils.c" />
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClInclude Include="include/winutils.h" />
|
||||||
|
</ItemGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||||
|
<ImportGroup Label="ExtensionTargets">
|
||||||
|
</ImportGroup>
|
||||||
|
</Project>
|
|
@ -0,0 +1,346 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: GetMaskString
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Get the mask string that are used for output to the console.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: on success
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// The function only sets the existed permission in the mask string. If the
|
||||||
|
// permission does not exist, the corresponding character in mask string is not
|
||||||
|
// altered. The caller need to initilize the mask string to be all '-' to get
|
||||||
|
// the correct mask string.
|
||||||
|
//
|
||||||
|
static BOOL GetMaskString(INT accessMask, LPWSTR maskString)
|
||||||
|
{
|
||||||
|
if(wcslen(maskString) != 10)
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
if ((accessMask & UX_DIRECTORY) == UX_DIRECTORY)
|
||||||
|
maskString[0] = L'd';
|
||||||
|
else if ((accessMask & UX_SYMLINK) == UX_SYMLINK)
|
||||||
|
maskString[0] = L'l';
|
||||||
|
|
||||||
|
if ((accessMask & UX_U_READ) == UX_U_READ)
|
||||||
|
maskString[1] = L'r';
|
||||||
|
if ((accessMask & UX_U_WRITE) == UX_U_WRITE)
|
||||||
|
maskString[2] = L'w';
|
||||||
|
if ((accessMask & UX_U_EXECUTE) == UX_U_EXECUTE)
|
||||||
|
maskString[3] = L'x';
|
||||||
|
|
||||||
|
if ((accessMask & UX_G_READ) == UX_G_READ)
|
||||||
|
maskString[4] = L'r';
|
||||||
|
if ((accessMask & UX_G_WRITE) == UX_G_WRITE)
|
||||||
|
maskString[5] = L'w';
|
||||||
|
if ((accessMask & UX_G_EXECUTE) == UX_G_EXECUTE)
|
||||||
|
maskString[6] = L'x';
|
||||||
|
|
||||||
|
if ((accessMask & UX_O_READ) == UX_O_READ)
|
||||||
|
maskString[7] = L'r';
|
||||||
|
if ((accessMask & UX_O_WRITE) == UX_O_WRITE)
|
||||||
|
maskString[8] = L'w';
|
||||||
|
if ((accessMask & UX_O_EXECUTE) == UX_O_EXECUTE)
|
||||||
|
maskString[9] = L'x';
|
||||||
|
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: LsPrintLine
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Print one line of 'ls' command given all the information needed
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// None
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
// if useSeparator is false, separates the output tokens with a space
|
||||||
|
// character, otherwise, with a pipe character
|
||||||
|
//
|
||||||
|
static BOOL LsPrintLine(
|
||||||
|
const INT mask,
|
||||||
|
const DWORD hardlinkCount,
|
||||||
|
LPCWSTR ownerName,
|
||||||
|
LPCWSTR groupName,
|
||||||
|
const FILETIME *lpFileWritetime,
|
||||||
|
const LARGE_INTEGER fileSize,
|
||||||
|
LPCWSTR path,
|
||||||
|
BOOL useSeparator)
|
||||||
|
{
|
||||||
|
// 'd' + 'rwx' for user, group, other
|
||||||
|
static const size_t ck_ullMaskLen = 1 + 3 * 3;
|
||||||
|
|
||||||
|
LPWSTR maskString = NULL;
|
||||||
|
SYSTEMTIME stFileWriteTime;
|
||||||
|
BOOL ret = FALSE;
|
||||||
|
|
||||||
|
maskString = (LPWSTR)LocalAlloc(LPTR, (ck_ullMaskLen+1)*sizeof(WCHAR));
|
||||||
|
if (maskString == NULL)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"LocalAlloc", GetLastError());
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build mask string from mask mode
|
||||||
|
if (FAILED(StringCchCopyW(maskString, (ck_ullMaskLen+1), L"----------")))
|
||||||
|
{
|
||||||
|
goto LsPrintLineEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!GetMaskString(mask, maskString))
|
||||||
|
{
|
||||||
|
goto LsPrintLineEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert file time to system time
|
||||||
|
if (!FileTimeToSystemTime(lpFileWritetime, &stFileWriteTime))
|
||||||
|
{
|
||||||
|
goto LsPrintLineEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (useSeparator)
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"%10s|%d|%s|%s|%lld|%3s|%2d|%4d|%s\n",
|
||||||
|
maskString, hardlinkCount, ownerName, groupName, fileSize.QuadPart,
|
||||||
|
MONTHS[stFileWriteTime.wMonth-1], stFileWriteTime.wDay,
|
||||||
|
stFileWriteTime.wYear, path);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"%10s %d %s %s %lld %3s %2d %4d %s\n",
|
||||||
|
maskString, hardlinkCount, ownerName, groupName, fileSize.QuadPart,
|
||||||
|
MONTHS[stFileWriteTime.wMonth-1], stFileWriteTime.wDay,
|
||||||
|
stFileWriteTime.wYear, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = TRUE;
|
||||||
|
|
||||||
|
LsPrintLineEnd:
|
||||||
|
LocalFree(maskString);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
// List of command line options supported by "winutils ls"
|
||||||
|
enum CmdLineOption
|
||||||
|
{
|
||||||
|
CmdLineOptionFollowSymlink = 0x1, // "-L"
|
||||||
|
CmdLineOptionSeparator = 0x2 // "-F"
|
||||||
|
// options should be powers of 2 (aka next is 0x4)
|
||||||
|
};
|
||||||
|
|
||||||
|
static wchar_t* CurrentDir = L".";
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseCommandLine
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Parses the command line
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE on the valid command line, FALSE otherwise
|
||||||
|
//
|
||||||
|
BOOL ParseCommandLine(
|
||||||
|
int argc, wchar_t *argv[], wchar_t** path, int *optionsMask)
|
||||||
|
{
|
||||||
|
int MaxOptions = 2; // Should be equal to the number of elems in CmdLineOption
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
assert(optionsMask != NULL);
|
||||||
|
assert(argv != NULL);
|
||||||
|
assert(path != NULL);
|
||||||
|
|
||||||
|
*optionsMask = 0;
|
||||||
|
|
||||||
|
if (argc == 1)
|
||||||
|
{
|
||||||
|
// no path specified, assume "."
|
||||||
|
*path = CurrentDir;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc == 2)
|
||||||
|
{
|
||||||
|
// only path specified, no other options
|
||||||
|
*path = argv[1];
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc > 2 + MaxOptions)
|
||||||
|
{
|
||||||
|
// too many parameters
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 1; i < argc - 1; ++i)
|
||||||
|
{
|
||||||
|
if (wcscmp(argv[i], L"-L") == 0)
|
||||||
|
{
|
||||||
|
// Check if this option was already specified
|
||||||
|
BOOL alreadySet = *optionsMask & CmdLineOptionFollowSymlink;
|
||||||
|
if (alreadySet)
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
*optionsMask |= CmdLineOptionFollowSymlink;
|
||||||
|
}
|
||||||
|
else if (wcscmp(argv[i], L"-F") == 0)
|
||||||
|
{
|
||||||
|
// Check if this option was already specified
|
||||||
|
BOOL alreadySet = *optionsMask & CmdLineOptionSeparator;
|
||||||
|
if (alreadySet)
|
||||||
|
return FALSE;
|
||||||
|
|
||||||
|
*optionsMask |= CmdLineOptionSeparator;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
*path = argv[argc - 1];
|
||||||
|
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: Ls
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// The main method for ls command
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// 0: on success
|
||||||
|
//
|
||||||
|
// Notes:
|
||||||
|
//
|
||||||
|
int Ls(int argc, wchar_t *argv[])
|
||||||
|
{
|
||||||
|
LPWSTR pathName = NULL;
|
||||||
|
LPWSTR longPathName = NULL;
|
||||||
|
|
||||||
|
BY_HANDLE_FILE_INFORMATION fileInformation;
|
||||||
|
|
||||||
|
LPWSTR ownerName = NULL;
|
||||||
|
LPWSTR groupName = NULL;
|
||||||
|
INT unixAccessMode = 0;
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
|
||||||
|
LARGE_INTEGER fileSize;
|
||||||
|
|
||||||
|
BOOL isSymlink = FALSE;
|
||||||
|
|
||||||
|
int ret = EXIT_FAILURE;
|
||||||
|
int optionsMask = 0;
|
||||||
|
|
||||||
|
if (!ParseCommandLine(argc, argv, &pathName, &optionsMask))
|
||||||
|
{
|
||||||
|
fwprintf(stderr, L"Incorrect command line arguments.\n\n");
|
||||||
|
LsUsage(argv[0]);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(pathName != NULL);
|
||||||
|
|
||||||
|
if (wcsspn(pathName, L"/?|><:*\"") != 0)
|
||||||
|
{
|
||||||
|
fwprintf(stderr, L"Incorrect file name format: %s\n", pathName);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the path the the long path
|
||||||
|
//
|
||||||
|
dwErrorCode = ConvertToLongPath(pathName, &longPathName);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"ConvertToLongPath", dwErrorCode);
|
||||||
|
goto LsEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
dwErrorCode = GetFileInformationByName(
|
||||||
|
longPathName, optionsMask & CmdLineOptionFollowSymlink, &fileInformation);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"GetFileInformationByName", dwErrorCode);
|
||||||
|
goto LsEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
dwErrorCode = SymbolicLinkCheck(longPathName, &isSymlink);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"IsSymbolicLink", dwErrorCode);
|
||||||
|
goto LsEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isSymlink)
|
||||||
|
unixAccessMode |= UX_SYMLINK;
|
||||||
|
else if (IsDirFileInfo(&fileInformation))
|
||||||
|
unixAccessMode |= UX_DIRECTORY;
|
||||||
|
|
||||||
|
dwErrorCode = FindFileOwnerAndPermission(longPathName,
|
||||||
|
&ownerName, &groupName, &unixAccessMode);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"FindFileOwnerAndPermission", dwErrorCode);
|
||||||
|
goto LsEnd;
|
||||||
|
}
|
||||||
|
|
||||||
|
fileSize.HighPart = fileInformation.nFileSizeHigh;
|
||||||
|
fileSize.LowPart = fileInformation.nFileSizeLow;
|
||||||
|
|
||||||
|
// Print output using the input path name (not the long one)
|
||||||
|
//
|
||||||
|
if (!LsPrintLine(unixAccessMode,
|
||||||
|
fileInformation.nNumberOfLinks,
|
||||||
|
ownerName, groupName,
|
||||||
|
&fileInformation.ftLastWriteTime,
|
||||||
|
fileSize,
|
||||||
|
pathName,
|
||||||
|
optionsMask & CmdLineOptionSeparator))
|
||||||
|
goto LsEnd;
|
||||||
|
|
||||||
|
ret = EXIT_SUCCESS;
|
||||||
|
|
||||||
|
LsEnd:
|
||||||
|
LocalFree(ownerName);
|
||||||
|
LocalFree(groupName);
|
||||||
|
LocalFree(longPathName);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
void LsUsage(LPCWSTR program)
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"\
|
||||||
|
Usage: %s [OPTIONS] [FILE]\n\
|
||||||
|
List information about the FILE (the current directory by default).\n\
|
||||||
|
Using long listing format and list directory entries instead of contents,\n\
|
||||||
|
and do not dereference symbolic links.\n\
|
||||||
|
Provides equivalent or similar function as 'ls -ld' on GNU/Linux.\n\
|
||||||
|
\n\
|
||||||
|
OPTIONS: -L dereference symbolic links\n\
|
||||||
|
-F format the output by separating tokens with a pipe\n",
|
||||||
|
program);
|
||||||
|
}
|
|
@ -0,0 +1,115 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
static void Usage(LPCWSTR program);
|
||||||
|
|
||||||
|
int wmain(int argc, wchar_t* argv[])
|
||||||
|
{
|
||||||
|
LPCWSTR cmd = NULL;
|
||||||
|
|
||||||
|
if (argc < 2)
|
||||||
|
{
|
||||||
|
Usage(argv[0]);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd = argv[1];
|
||||||
|
|
||||||
|
if (wcscmp(L"ls", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Ls(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"chmod", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Chmod(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"chown", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Chown(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"groups", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Groups(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"hardlink", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Hardlink(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"symlink", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Symlink(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"task", cmd) == 0)
|
||||||
|
{
|
||||||
|
return Task(argc - 1, argv + 1);
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"systeminfo", cmd) == 0)
|
||||||
|
{
|
||||||
|
return SystemInfo();
|
||||||
|
}
|
||||||
|
else if (wcscmp(L"help", cmd) == 0)
|
||||||
|
{
|
||||||
|
Usage(argv[0]);
|
||||||
|
return EXIT_SUCCESS;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Usage(argv[0]);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static void Usage(LPCWSTR program)
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"Usage: %s [command] ...\n\
|
||||||
|
Provide basic command line utilities for Hadoop on Windows.\n\n\
|
||||||
|
The available commands and their usages are:\n\n", program);
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"chmod", L"Change file mode bits.");
|
||||||
|
ChmodUsage(L"chmod");
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"chown", L"Change file owner.");
|
||||||
|
ChownUsage(L"chown");
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"groups", L"List user groups.");
|
||||||
|
GroupsUsage(L"groups");
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"hardlink", L"Hard link operations.");
|
||||||
|
HardlinkUsage();
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"ls", L"List file information.");
|
||||||
|
LsUsage(L"ls");
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-10s%s\n\n", L"symlink", L"Create a symbolic link.");
|
||||||
|
SymlinkUsage();
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"systeminfo", L"System information.");
|
||||||
|
SystemInfoUsage();
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
|
||||||
|
fwprintf(stdout, L"%-15s%s\n\n", L"task", L"Task operations.");
|
||||||
|
TaskUsage();
|
||||||
|
fwprintf(stdout, L"\n\n");
|
||||||
|
}
|
|
@ -0,0 +1,115 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: Symlink
//
// Description:
//  The main method for the symlink command: creates a symbolic link named
//  argv[1] pointing at argv[2]. Both paths are first converted to long-path
//  form so links longer than MAX_PATH work.
//
// Returns:
//  SUCCESS: on success
//  SYMLINK_NO_PRIVILEGE: if the caller lacks SeCreateSymbolicLinkPrivilege
//  FAILURE: for all other errors
//
// Notes:
//  The link type (file vs. directory) is chosen by probing the link target.
//
int Symlink(int argc, wchar_t *argv[])
{
  PWSTR longLinkName = NULL;
  PWSTR longFileName = NULL;
  DWORD dwErrorCode = ERROR_SUCCESS;

  BOOL isDir = FALSE;

  DWORD dwRtnCode = ERROR_SUCCESS;
  DWORD dwFlag = 0;

  int ret = SUCCESS;

  // Exactly two operands are required: [LINKNAME] [FILENAME].
  if (argc != 3)
  {
    SymlinkUsage();
    return FAILURE;
  }

  // argv[1] is the link to create, argv[2] is the existing target.
  dwErrorCode = ConvertToLongPath(argv[1], &longLinkName);
  if (dwErrorCode != ERROR_SUCCESS)
  {
    ret = FAILURE;
    goto SymlinkEnd;
  }
  dwErrorCode = ConvertToLongPath(argv[2], &longFileName);
  if (dwErrorCode != ERROR_SUCCESS)
  {
    ret = FAILURE;
    goto SymlinkEnd;
  }

  // Check if the process's access token has the privilege to create
  // symbolic links. This is an explicit privilege check performed up
  // front so the command can report a distinct exit code
  // (SYMLINK_NO_PRIVILEGE) instead of relying on the error code
  // returned by CreateSymbolicLink().
  //
  if (!EnablePrivilege(L"SeCreateSymbolicLinkPrivilege"))
  {
    fwprintf(stderr,
      L"No privilege to create symbolic links.\n");
    ret = SYMLINK_NO_PRIVILEGE;
    goto SymlinkEnd;
  }

  // Probe the target: directory links need SYMBOLIC_LINK_FLAG_DIRECTORY.
  if ((dwRtnCode = DirectoryCheck(longFileName, &isDir)) != ERROR_SUCCESS)
  {
    ReportErrorCode(L"DirectoryCheck", dwRtnCode);
    ret = FAILURE;
    goto SymlinkEnd;
  }

  if (isDir)
    dwFlag = SYMBOLIC_LINK_FLAG_DIRECTORY;

  if (!CreateSymbolicLinkW(longLinkName, longFileName, dwFlag))
  {
    ReportErrorCode(L"CreateSymbolicLink", GetLastError());
    ret = FAILURE;
    goto SymlinkEnd;
  }

SymlinkEnd:
  // LocalFree(NULL) is a no-op, so this is safe on all exit paths.
  LocalFree(longLinkName);
  LocalFree(longFileName);
  return ret;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: SymlinkUsage
//
// Description:
//  Prints the usage text for the symlink sub-command to stdout.
//
void SymlinkUsage()
{
  // Fixed typo in the user-facing text: "does no have" -> "does not have".
  fwprintf(stdout, L"\
Usage: symlink [LINKNAME] [FILENAME]\n\
Creates a symbolic link\n\
\n\
0 is returned on success.\n\
2 is returned if the user does not have privilege to create symbolic links.\n\
1 is returned for all other errors.\n\
\n\
The default security settings in Windows disallow non-elevated administrators\n\
and all non-administrators from creating symbolic links. The security settings\n\
for symbolic links can be changed in the Local Security Policy management\n\
console.\n");
}
|
||||||
|
|
|
@ -0,0 +1,120 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"

// PSAPI_VERSION selects which PSAPI binding psapi.h emits, so it must be
// defined BEFORE the header is included; defining it afterwards (as the
// original code did) has no effect.
#define PSAPI_VERSION 1
#include <psapi.h>
#include <PowrProf.h>

#pragma comment(lib, "psapi.lib")
#pragma comment(lib, "Powrprof.lib")
|
||||||
|
|
||||||
|
// Per-processor power data returned by
// CallNtPowerInformation(ProcessorInformation, ...). This layout is
// documented for that API but not declared in the public SDK headers,
// hence the local definition. All frequencies are in MHz.
typedef struct _PROCESSOR_POWER_INFORMATION {
  ULONG Number;           // zero-based processor index
  ULONG MaxMhz;           // maximum rated frequency
  ULONG CurrentMhz;       // current operating frequency
  ULONG MhzLimit;         // current throttle limit
  ULONG MaxIdleState;     // deepest supported idle state
  ULONG CurrentIdleState; // current idle state
} PROCESSOR_POWER_INFORMATION, *PPROCESSOR_POWER_INFORMATION;
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: SystemInfo
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Returns the resource information about the machine
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// EXIT_SUCCESS: On success
|
||||||
|
// EXIT_FAILURE: otherwise
|
||||||
|
int SystemInfo()
|
||||||
|
{
|
||||||
|
size_t vmemSize, vmemFree, memSize, memFree;
|
||||||
|
PERFORMANCE_INFORMATION memInfo;
|
||||||
|
SYSTEM_INFO sysInfo;
|
||||||
|
FILETIME idleTimeFt, kernelTimeFt, userTimeFt;
|
||||||
|
ULARGE_INTEGER idleTime, kernelTime, userTime;
|
||||||
|
ULONGLONG cpuTimeMs;
|
||||||
|
size_t size;
|
||||||
|
LPBYTE pBuffer;
|
||||||
|
PPROCESSOR_POWER_INFORMATION ppi;
|
||||||
|
long cpuFrequencyKhz;
|
||||||
|
NTSTATUS status;
|
||||||
|
|
||||||
|
ZeroMemory(&memInfo, sizeof(PERFORMANCE_INFORMATION));
|
||||||
|
memInfo.cb = sizeof(PERFORMANCE_INFORMATION);
|
||||||
|
if(!GetPerformanceInfo(&memInfo, sizeof(PERFORMANCE_INFORMATION)))
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"GetPerformanceInfo", GetLastError());
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
vmemSize = memInfo.CommitLimit*memInfo.PageSize;
|
||||||
|
vmemFree = vmemSize - memInfo.CommitTotal*memInfo.PageSize;
|
||||||
|
memSize = memInfo.PhysicalTotal*memInfo.PageSize;
|
||||||
|
memFree = memInfo.PhysicalAvailable*memInfo.PageSize;
|
||||||
|
|
||||||
|
GetSystemInfo(&sysInfo);
|
||||||
|
|
||||||
|
if(!GetSystemTimes(&idleTimeFt, &kernelTimeFt, &userTimeFt))
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"GetSystemTimes", GetLastError());
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
idleTime.HighPart = idleTimeFt.dwHighDateTime;
|
||||||
|
idleTime.LowPart = idleTimeFt.dwLowDateTime;
|
||||||
|
kernelTime.HighPart = kernelTimeFt.dwHighDateTime;
|
||||||
|
kernelTime.LowPart = kernelTimeFt.dwLowDateTime;
|
||||||
|
userTime.HighPart = userTimeFt.dwHighDateTime;
|
||||||
|
userTime.LowPart = userTimeFt.dwLowDateTime;
|
||||||
|
|
||||||
|
cpuTimeMs = (kernelTime.QuadPart - idleTime.QuadPart + userTime.QuadPart)/10000;
|
||||||
|
|
||||||
|
// allocate buffer to get info for each processor
|
||||||
|
size = sysInfo.dwNumberOfProcessors * sizeof(PROCESSOR_POWER_INFORMATION);
|
||||||
|
pBuffer = (BYTE*) LocalAlloc(LPTR, size);
|
||||||
|
if(!pBuffer)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"LocalAlloc", GetLastError());
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
status = CallNtPowerInformation(ProcessorInformation, NULL, 0, pBuffer, (long)size);
|
||||||
|
if(0 != status)
|
||||||
|
{
|
||||||
|
fwprintf_s(stderr, L"Error in CallNtPowerInformation. Err:%d\n", status);
|
||||||
|
LocalFree(pBuffer);
|
||||||
|
return EXIT_FAILURE;
|
||||||
|
}
|
||||||
|
ppi = (PPROCESSOR_POWER_INFORMATION)pBuffer;
|
||||||
|
cpuFrequencyKhz = ppi->MaxMhz*1000;
|
||||||
|
LocalFree(pBuffer);
|
||||||
|
|
||||||
|
fwprintf_s(stdout, L"%Iu,%Iu,%Iu,%Iu,%Iu,%Iu,%Iu\n", vmemSize, memSize, vmemFree, memFree, sysInfo.dwNumberOfProcessors, cpuFrequencyKhz, cpuTimeMs);
|
||||||
|
|
||||||
|
return EXIT_SUCCESS;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: SystemInfoUsage
//
// Description:
//  Prints the usage text for the systeminfo sub-command to stdout,
//  including the field order of the comma-separated output line.
//
void SystemInfoUsage()
{
  fwprintf(stdout, L"\
Usage: systeminfo\n\
Prints machine information on stdout\n\
Comma separated list of the following values.\n\
VirtualMemorySize(bytes),PhysicalMemorySize(bytes),\n\
FreeVirtualMemory(bytes),FreePhysicalMemory(bytes),\n\
NumberOfProcessors,CpuFrequency(Khz),\n\
CpuTime(MilliSec,Kernel+User)\n");
}
|
|
@ -0,0 +1,461 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with this
|
||||||
|
* work for additional information regarding copyright ownership. The ASF
|
||||||
|
* licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
* License for the specific language governing permissions and limitations under
|
||||||
|
* the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "winutils.h"
#include <errno.h>

// PSAPI_VERSION selects which PSAPI binding psapi.h emits, so it must be
// defined BEFORE the header is included; defining it afterwards (as the
// original code did) has no effect.
#define PSAPI_VERSION 1
#include <psapi.h>

#pragma comment(lib, "psapi.lib")

// Exit/error code reported when "task isAlive" finds the job object empty.
#define ERROR_TASK_NOT_ALIVE 1
|
||||||
|
|
||||||
|
// List of different task related command line options supported by
// winutils.
typedef enum TaskCommandOptionType
{
  TaskInvalid,      // no/unrecognized sub-command
  TaskCreate,       // task create [TASKNAME] [COMMAND_LINE]
  TaskIsAlive,      // task isAlive [TASKNAME]
  TaskKill,         // task kill [TASKNAME]
  TaskProcessList   // task processList [TASKNAME]
} TaskCommandOption;
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: ParseCommandLine
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Parses the given command line. On success, out param 'command' contains
|
||||||
|
// the user specified command.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// TRUE: If the command line is valid
|
||||||
|
// FALSE: otherwise
|
||||||
|
static BOOL ParseCommandLine(__in int argc,
|
||||||
|
__in wchar_t *argv[],
|
||||||
|
__out TaskCommandOption *command)
|
||||||
|
{
|
||||||
|
*command = TaskInvalid;
|
||||||
|
|
||||||
|
if (wcscmp(argv[0], L"task") != 0 )
|
||||||
|
{
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc == 3) {
|
||||||
|
if (wcscmp(argv[1], L"isAlive") == 0)
|
||||||
|
{
|
||||||
|
*command = TaskIsAlive;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
if (wcscmp(argv[1], L"kill") == 0)
|
||||||
|
{
|
||||||
|
*command = TaskKill;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
if (wcscmp(argv[1], L"processList") == 0)
|
||||||
|
{
|
||||||
|
*command = TaskProcessList;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argc == 4) {
|
||||||
|
if (wcscmp(argv[1], L"create") == 0)
|
||||||
|
{
|
||||||
|
*command = TaskCreate;
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: createTask
//
// Description:
//  Creates a task via a jobobject: the current winutils process joins a
//  newly created named job object, spawns cmdLine as a child (which
//  inherits job membership), waits for it, then tears the job down so all
//  descendants die with it.
//
// Returns:
//  ERROR_SUCCESS: On success
//  GetLastError: otherwise
//  On success of the Win32 calls, returns the child process's exit code.
DWORD createTask(_TCHAR* jobObjName, _TCHAR* cmdLine)
{
  DWORD err = ERROR_SUCCESS;
  DWORD exitCode = EXIT_FAILURE;
  STARTUPINFO si;
  PROCESS_INFORMATION pi;
  HANDLE jobObject = NULL;
  JOBOBJECT_EXTENDED_LIMIT_INFORMATION jeli = { 0 };

  // Create un-inheritable job object handle and set job object to terminate
  // when last handle is closed. So winutils.exe invocation has the only open
  // job object handle. Exit of winutils.exe ensures termination of job object.
  // Either a clean exit of winutils or crash or external termination.
  jobObject = CreateJobObject(NULL, jobObjName);
  err = GetLastError();
  // ERROR_ALREADY_EXISTS means some other process owns this task name;
  // treat that as a failure rather than joining the existing job.
  if(jobObject == NULL || err == ERROR_ALREADY_EXISTS)
  {
    return err;
  }
  jeli.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
  if(SetInformationJobObject(jobObject,
                             JobObjectExtendedLimitInformation,
                             &jeli,
                             sizeof(jeli)) == 0)
  {
    err = GetLastError();
    CloseHandle(jobObject);
    return err;
  }

  // Join this process to the job so the child (and its descendants)
  // inherit membership when created below.
  if(AssignProcessToJobObject(jobObject, GetCurrentProcess()) == 0)
  {
    err = GetLastError();
    CloseHandle(jobObject);
    return err;
  }

  // the child JVM uses this env var to send the task OS process identifier
  // to the TaskTracker. We pass the job object name.
  if(SetEnvironmentVariable(_T("JVM_PID"), jobObjName) == 0)
  {
    err = GetLastError();
    CloseHandle(jobObject);
    return err;
  }

  ZeroMemory( &si, sizeof(si) );
  si.cb = sizeof(si);
  ZeroMemory( &pi, sizeof(pi) );
  // bInheritHandles=TRUE so the child gets this process's inheritable
  // handles (stdio), per standard CreateProcess usage.
  if(CreateProcess(NULL, cmdLine, NULL, NULL, TRUE, 0, NULL, NULL, &si, &pi) == 0)
  {
    err = GetLastError();
    CloseHandle(jobObject);
    return err;
  }
  // The primary thread handle is not needed; release it immediately.
  CloseHandle(pi.hThread);

  // Wait until child process exits.
  WaitForSingleObject( pi.hProcess, INFINITE );
  if(GetExitCodeProcess(pi.hProcess, &exitCode) == 0)
  {
    err = GetLastError();
  }
  CloseHandle( pi.hProcess );

  // Terminate job object so that all spawned processes are also killed.
  // This is needed because once this process closes the handle to the job
  // object and none of the spawned objects have the handle open (via
  // inheritance on creation) then it will not be possible for any other external
  // program (say winutils task kill) to terminate this job object via its name.
  if(TerminateJobObject(jobObject, exitCode) == 0)
  {
    err = GetLastError();
  }

  // comes here only on failure or TerminateJobObject
  CloseHandle(jobObject);

  if(err != ERROR_SUCCESS)
  {
    return err;
  }
  // No Win32 failure: propagate the child's exit code to the caller.
  return exitCode;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: isTaskAlive
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Checks if a task is alive via a jobobject. Outputs the
|
||||||
|
// appropriate information to stdout on success, or stderr on failure.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// ERROR_SUCCESS: On success
|
||||||
|
// GetLastError: otherwise
|
||||||
|
DWORD isTaskAlive(const _TCHAR* jobObjName, int* isAlive, int* procsInJob)
|
||||||
|
{
|
||||||
|
PJOBOBJECT_BASIC_PROCESS_ID_LIST procList;
|
||||||
|
HANDLE jobObject = NULL;
|
||||||
|
int numProcs = 100;
|
||||||
|
|
||||||
|
*isAlive = FALSE;
|
||||||
|
|
||||||
|
jobObject = OpenJobObject(JOB_OBJECT_QUERY, FALSE, jobObjName);
|
||||||
|
|
||||||
|
if(jobObject == NULL)
|
||||||
|
{
|
||||||
|
DWORD err = GetLastError();
|
||||||
|
if(err == ERROR_FILE_NOT_FOUND)
|
||||||
|
{
|
||||||
|
// job object does not exist. assume its not alive
|
||||||
|
return ERROR_SUCCESS;
|
||||||
|
}
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
|
||||||
|
procList = (PJOBOBJECT_BASIC_PROCESS_ID_LIST) LocalAlloc(LPTR, sizeof (JOBOBJECT_BASIC_PROCESS_ID_LIST) + numProcs*32);
|
||||||
|
if (!procList)
|
||||||
|
{
|
||||||
|
DWORD err = GetLastError();
|
||||||
|
CloseHandle(jobObject);
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
if(QueryInformationJobObject(jobObject, JobObjectBasicProcessIdList, procList, sizeof(JOBOBJECT_BASIC_PROCESS_ID_LIST)+numProcs*32, NULL) == 0)
|
||||||
|
{
|
||||||
|
DWORD err = GetLastError();
|
||||||
|
if(err != ERROR_MORE_DATA)
|
||||||
|
{
|
||||||
|
CloseHandle(jobObject);
|
||||||
|
LocalFree(procList);
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if(procList->NumberOfAssignedProcesses > 0)
|
||||||
|
{
|
||||||
|
*isAlive = TRUE;
|
||||||
|
*procsInJob = procList->NumberOfAssignedProcesses;
|
||||||
|
}
|
||||||
|
|
||||||
|
LocalFree(procList);
|
||||||
|
|
||||||
|
return ERROR_SUCCESS;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: killTask
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Kills a task via a jobobject. Outputs the
|
||||||
|
// appropriate information to stdout on success, or stderr on failure.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// ERROR_SUCCESS: On success
|
||||||
|
// GetLastError: otherwise
|
||||||
|
DWORD killTask(_TCHAR* jobObjName)
|
||||||
|
{
|
||||||
|
HANDLE jobObject = OpenJobObject(JOB_OBJECT_TERMINATE, FALSE, jobObjName);
|
||||||
|
if(jobObject == NULL)
|
||||||
|
{
|
||||||
|
DWORD err = GetLastError();
|
||||||
|
if(err == ERROR_FILE_NOT_FOUND)
|
||||||
|
{
|
||||||
|
// job object does not exist. assume its not alive
|
||||||
|
return ERROR_SUCCESS;
|
||||||
|
}
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(TerminateJobObject(jobObject, 1) == 0)
|
||||||
|
{
|
||||||
|
return GetLastError();
|
||||||
|
}
|
||||||
|
CloseHandle(jobObject);
|
||||||
|
|
||||||
|
return ERROR_SUCCESS;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: printTaskProcessList
//
// Description:
//  Prints resource usage of all processes in the task jobobject, one line
//  per process: pid, private bytes, working set bytes, cpu time (ms).
//
// Returns:
//  ERROR_SUCCESS: On success
//  GetLastError: otherwise
DWORD printTaskProcessList(const _TCHAR* jobObjName)
{
  DWORD i;
  PJOBOBJECT_BASIC_PROCESS_ID_LIST procList;
  int numProcs = 100;
  HANDLE jobObject = OpenJobObject(JOB_OBJECT_QUERY, FALSE, jobObjName);
  if(jobObject == NULL)
  {
    DWORD err = GetLastError();
    return err;
  }

  // NOTE(review): 32 bytes per slot over-allocates relative to
  // sizeof(ULONG_PTR); presumably deliberate headroom — confirm.
  procList = (PJOBOBJECT_BASIC_PROCESS_ID_LIST) LocalAlloc(LPTR, sizeof (JOBOBJECT_BASIC_PROCESS_ID_LIST) + numProcs*32);
  if (!procList)
  {
    DWORD err = GetLastError();
    CloseHandle(jobObject);
    return err;
  }
  // Grow-and-retry loop: on ERROR_MORE_DATA, resize the buffer to the
  // reported assigned-process count and query again.
  // NOTE(review): if processes keep spawning between iterations the
  // reported count can keep growing and this loop can iterate repeatedly;
  // verify termination is acceptable for the expected workloads.
  while(QueryInformationJobObject(jobObject, JobObjectBasicProcessIdList, procList, sizeof(JOBOBJECT_BASIC_PROCESS_ID_LIST)+numProcs*32, NULL) == 0)
  {
    DWORD err = GetLastError();
    if(err != ERROR_MORE_DATA)
    {
      CloseHandle(jobObject);
      LocalFree(procList);
      return err;
    }
    numProcs = procList->NumberOfAssignedProcesses;
    LocalFree(procList);
    procList = (PJOBOBJECT_BASIC_PROCESS_ID_LIST) LocalAlloc(LPTR, sizeof (JOBOBJECT_BASIC_PROCESS_ID_LIST) + numProcs*32);
    if (!procList)
    {
      DWORD err = GetLastError();
      CloseHandle(jobObject);
      return err;
    }
  }

  for(i=0; i<procList->NumberOfProcessIdsInList; ++i)
  {
    // Processes that exited since the query, or that we lack access to,
    // simply yield a NULL handle and are skipped.
    HANDLE hProcess = OpenProcess( PROCESS_QUERY_INFORMATION, FALSE, (DWORD)procList->ProcessIdList[i] );
    if( hProcess != NULL )
    {
      PROCESS_MEMORY_COUNTERS_EX pmc;
      if ( GetProcessMemoryInfo( hProcess, (PPROCESS_MEMORY_COUNTERS)&pmc, sizeof(pmc)) )
      {
        FILETIME create, exit, kernel, user;
        if( GetProcessTimes( hProcess, &create, &exit, &kernel, &user) )
        {
          ULARGE_INTEGER kernelTime, userTime;
          ULONGLONG cpuTimeMs;
          kernelTime.HighPart = kernel.dwHighDateTime;
          kernelTime.LowPart = kernel.dwLowDateTime;
          userTime.HighPart = user.dwHighDateTime;
          userTime.LowPart = user.dwLowDateTime;
          // FILETIME units are 100ns; /10000 converts to milliseconds.
          cpuTimeMs = (kernelTime.QuadPart+userTime.QuadPart)/10000;
          _ftprintf_s(stdout, TEXT("%u,%Iu,%Iu,%Iu\n"), procList->ProcessIdList[i], pmc.PrivateUsage, pmc.WorkingSetSize, cpuTimeMs);
        }
      }
      CloseHandle( hProcess );
    }
  }

  LocalFree(procList);
  CloseHandle(jobObject);

  return ERROR_SUCCESS;
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
|
||||||
|
// Function: Task
|
||||||
|
//
|
||||||
|
// Description:
|
||||||
|
// Manages a task via a jobobject (create/isAlive/kill). Outputs the
|
||||||
|
// appropriate information to stdout on success, or stderr on failure.
|
||||||
|
//
|
||||||
|
// Returns:
|
||||||
|
// ERROR_SUCCESS: On success
|
||||||
|
// Error code otherwise: otherwise
|
||||||
|
int Task(int argc, wchar_t *argv[])
|
||||||
|
{
|
||||||
|
DWORD dwErrorCode = ERROR_SUCCESS;
|
||||||
|
TaskCommandOption command = TaskInvalid;
|
||||||
|
|
||||||
|
if (!ParseCommandLine(argc, argv, &command)) {
|
||||||
|
dwErrorCode = ERROR_INVALID_COMMAND_LINE;
|
||||||
|
|
||||||
|
fwprintf(stderr, L"Incorrect command line arguments.\n\n");
|
||||||
|
TaskUsage();
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (command == TaskCreate)
|
||||||
|
{
|
||||||
|
// Create the task jobobject
|
||||||
|
//
|
||||||
|
dwErrorCode = createTask(argv[2], argv[3]);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"createTask", dwErrorCode);
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
} else if (command == TaskIsAlive)
|
||||||
|
{
|
||||||
|
// Check if task jobobject
|
||||||
|
//
|
||||||
|
int isAlive;
|
||||||
|
int numProcs;
|
||||||
|
dwErrorCode = isTaskAlive(argv[2], &isAlive, &numProcs);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"isTaskAlive", dwErrorCode);
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output the result
|
||||||
|
if(isAlive == TRUE)
|
||||||
|
{
|
||||||
|
fwprintf(stdout, L"IsAlive,%d\n", numProcs);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
dwErrorCode = ERROR_TASK_NOT_ALIVE;
|
||||||
|
ReportErrorCode(L"isTaskAlive returned false", dwErrorCode);
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
} else if (command == TaskKill)
|
||||||
|
{
|
||||||
|
// Check if task jobobject
|
||||||
|
//
|
||||||
|
dwErrorCode = killTask(argv[2]);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"killTask", dwErrorCode);
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
} else if (command == TaskProcessList)
|
||||||
|
{
|
||||||
|
// Check if task jobobject
|
||||||
|
//
|
||||||
|
dwErrorCode = printTaskProcessList(argv[2]);
|
||||||
|
if (dwErrorCode != ERROR_SUCCESS)
|
||||||
|
{
|
||||||
|
ReportErrorCode(L"printTaskProcessList", dwErrorCode);
|
||||||
|
goto TaskExit;
|
||||||
|
}
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
// Should not happen
|
||||||
|
//
|
||||||
|
assert(FALSE);
|
||||||
|
}
|
||||||
|
|
||||||
|
TaskExit:
|
||||||
|
return dwErrorCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------
// Function: TaskUsage
//
// Description:
//  Prints the usage text for the task sub-command to stdout.
//
void TaskUsage()
{
  // Hadoop code checks for this string to determine if
  // jobobject's are being used.
  // ProcessTree.isSetsidSupported()
  // Do not reword this output without updating the Java side.
  fwprintf(stdout, L"\
Usage: task create [TASKNAME] [COMMAND_LINE] |\n\
task isAlive [TASKNAME] |\n\
task kill [TASKNAME]\n\
task processList [TASKNAME]\n\
Creates a new task jobobject with taskname\n\
Checks if task jobobject is alive\n\
Kills task jobobject\n\
Prints to stdout a list of processes in the task\n\
along with their resource usage. One process per line\n\
and comma separated info per process\n\
ProcessId,VirtualMemoryCommitted(bytes),\n\
WorkingSetSize(bytes),CpuTime(Millisec,Kernel+User)\n");
}
|
|
@ -0,0 +1,55 @@
|
||||||
|
|
||||||
|
Microsoft Visual Studio Solution File, Format Version 11.00
|
||||||
|
# Visual Studio 2010
|
||||||
|
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
|
||||||
|
ProjectSection(ProjectDependencies) = postProject
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
|
||||||
|
EndProjectSection
|
||||||
|
EndProject
|
||||||
|
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
|
||||||
|
EndProject
|
||||||
|
Global
|
||||||
|
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||||
|
Debug|Win32 = Debug|Win32
|
||||||
|
Debug|x64 = Debug|x64
|
||||||
|
Release|Win32 = Release|Win32
|
||||||
|
Release|x64 = Release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
|
||||||
|
{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
|
||||||
|
{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(SolutionProperties) = preSolution
|
||||||
|
HideSolutionNode = FALSE
|
||||||
|
EndGlobalSection
|
||||||
|
EndGlobal
|
|
@ -0,0 +1,181 @@
|
||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
contributor license agreements. See the NOTICE file distributed with
|
||||||
|
this work for additional information regarding copyright ownership.
|
||||||
|
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
(the "License"); you may not use this file except in compliance with
|
||||||
|
the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||||
|
<ItemGroup Label="ProjectConfigurations">
|
||||||
|
<ProjectConfiguration Include="Debug|Win32">
|
||||||
|
<Configuration>Debug</Configuration>
|
||||||
|
<Platform>Win32</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Debug|x64">
|
||||||
|
<Configuration>Debug</Configuration>
|
||||||
|
<Platform>x64</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Release|Win32">
|
||||||
|
<Configuration>Release</Configuration>
|
||||||
|
<Platform>Win32</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
<ProjectConfiguration Include="Release|x64">
|
||||||
|
<Configuration>Release</Configuration>
|
||||||
|
<Platform>x64</Platform>
|
||||||
|
</ProjectConfiguration>
|
||||||
|
</ItemGroup>
|
||||||
|
<PropertyGroup Label="Globals">
|
||||||
|
<ProjectGuid>{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}</ProjectGuid>
|
||||||
|
<Keyword>Win32Proj</Keyword>
|
||||||
|
<RootNamespace>winutils</RootNamespace>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||||
|
<ConfigurationType>Application</ConfigurationType>
|
||||||
|
<UseDebugLibraries>true</UseDebugLibraries>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||||
|
<ConfigurationType>Application</ConfigurationType>
|
||||||
|
<UseDebugLibraries>true</UseDebugLibraries>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||||
|
<ConfigurationType>Application</ConfigurationType>
|
||||||
|
<UseDebugLibraries>false</UseDebugLibraries>
|
||||||
|
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||||
|
<ConfigurationType>Application</ConfigurationType>
|
||||||
|
<UseDebugLibraries>false</UseDebugLibraries>
|
||||||
|
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||||
|
<CharacterSet>Unicode</CharacterSet>
|
||||||
|
</PropertyGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||||
|
<ImportGroup Label="ExtensionSettings">
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||||
|
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||||
|
</ImportGroup>
|
||||||
|
<PropertyGroup Label="UserMacros" />
|
||||||
|
<PropertyGroup>
|
||||||
|
<IncludePath>include;$(IncludePath)</IncludePath>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<LinkIncremental>true</LinkIncremental>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||||
|
<LinkIncremental>true</LinkIncremental>
|
||||||
|
<OutDir />
|
||||||
|
<IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<LinkIncremental>false</LinkIncremental>
|
||||||
|
</PropertyGroup>
|
||||||
|
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<LinkIncremental>false</LinkIncremental>
|
||||||
|
<IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
|
||||||
|
<OutDir>..\..\..\target\bin\</OutDir>
|
||||||
|
</PropertyGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||||
|
<ClCompile>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<Optimization>Disabled</Optimization>
|
||||||
|
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||||
|
<ClCompile>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<WarningLevel>Level4</WarningLevel>
|
||||||
|
<Optimization>Disabled</Optimization>
|
||||||
|
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||||
|
<ClCompile>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<Optimization>MaxSpeed</Optimization>
|
||||||
|
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||||
|
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||||
|
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||||
|
<OptimizeReferences>true</OptimizeReferences>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||||
|
<ClCompile>
|
||||||
|
<WarningLevel>Level3</WarningLevel>
|
||||||
|
<PrecompiledHeader>
|
||||||
|
</PrecompiledHeader>
|
||||||
|
<Optimization>MaxSpeed</Optimization>
|
||||||
|
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||||
|
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||||
|
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||||
|
</ClCompile>
|
||||||
|
<Link>
|
||||||
|
<SubSystem>Console</SubSystem>
|
||||||
|
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||||
|
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||||
|
<OptimizeReferences>true</OptimizeReferences>
|
||||||
|
</Link>
|
||||||
|
</ItemDefinitionGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ClCompile Include="symlink.c" />
|
||||||
|
<ClCompile Include="systeminfo.c" />
|
||||||
|
<ClCompile Include="chmod.c" />
|
||||||
|
<ClCompile Include="chown.c" />
|
||||||
|
<ClCompile Include="groups.c" />
|
||||||
|
<ClCompile Include="hardlink.c" />
|
||||||
|
<ClCompile Include="task.c" />
|
||||||
|
<ClCompile Include="ls.c" />
|
||||||
|
<ClCompile Include="main.c" />
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ProjectReference Include="libwinutils.vcxproj">
|
||||||
|
<Project>{12131aa7-902e-4a6d-9ce3-043261d22a12}</Project>
|
||||||
|
</ProjectReference>
|
||||||
|
</ItemGroup>
|
||||||
|
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||||
|
<ImportGroup Label="ExtensionTargets">
|
||||||
|
</ImportGroup>
|
||||||
|
</Project>
|
|
@ -33,9 +33,7 @@ Single Node Setup
|
||||||
* GNU/Linux is supported as a development and production platform.
|
* GNU/Linux is supported as a development and production platform.
|
||||||
Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
|
Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
|
||||||
|
|
||||||
* Win32 is supported as a development platform. Distributed operation
|
* Windows is also a supported platform.
|
||||||
has not been well tested on Win32, so it is not supported as a
|
|
||||||
production platform.
|
|
||||||
|
|
||||||
** Required Software
|
** Required Software
|
||||||
|
|
||||||
|
@ -46,11 +44,6 @@ Single Node Setup
|
||||||
[[2]] ssh must be installed and sshd must be running to use the Hadoop
|
[[2]] ssh must be installed and sshd must be running to use the Hadoop
|
||||||
scripts that manage remote Hadoop daemons.
|
scripts that manage remote Hadoop daemons.
|
||||||
|
|
||||||
Additional requirements for Windows include:
|
|
||||||
|
|
||||||
[[1]] Cygwin - Required for shell support in addition to the required
|
|
||||||
software above.
|
|
||||||
|
|
||||||
** Installing Software
|
** Installing Software
|
||||||
|
|
||||||
If your cluster doesn't have the requisite software you will need to
|
If your cluster doesn't have the requisite software you will need to
|
||||||
|
@ -63,11 +56,6 @@ Single Node Setup
|
||||||
$ sudo apt-get install rsync
|
$ sudo apt-get install rsync
|
||||||
----
|
----
|
||||||
|
|
||||||
On Windows, if you did not install the required software when you
|
|
||||||
installed cygwin, start the cygwin installer and select the packages:
|
|
||||||
|
|
||||||
* openssh - the Net category
|
|
||||||
|
|
||||||
* Download
|
* Download
|
||||||
|
|
||||||
To get a Hadoop distribution, download a recent stable release from one
|
To get a Hadoop distribution, download a recent stable release from one
|
||||||
|
|
|
@ -68,7 +68,7 @@ public static Path getTestRootPath(FileContext fc, String pathString) {
|
||||||
public static String getAbsoluteTestRootDir(FileContext fc)
|
public static String getAbsoluteTestRootDir(FileContext fc)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
if (absTestRootDir == null) {
|
if (absTestRootDir == null) {
|
||||||
if (TEST_ROOT_DIR.startsWith("/")) {
|
if (new Path(TEST_ROOT_DIR).isAbsolute()) {
|
||||||
absTestRootDir = TEST_ROOT_DIR;
|
absTestRootDir = TEST_ROOT_DIR;
|
||||||
} else {
|
} else {
|
||||||
absTestRootDir = fc.getWorkingDirectory().toString() + "/"
|
absTestRootDir = fc.getWorkingDirectory().toString() + "/"
|
||||||
|
|
|
@ -20,9 +20,11 @@
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
import junit.framework.Assert;
|
import junit.framework.Assert;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.permission.FsPermission;
|
import org.apache.hadoop.fs.permission.FsPermission;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
import org.junit.After;
|
import org.junit.After;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -52,6 +54,12 @@ public abstract class FileContextURIBase {
|
||||||
private static final String basePath = System.getProperty("test.build.data",
|
private static final String basePath = System.getProperty("test.build.data",
|
||||||
"build/test/data") + "/testContextURI";
|
"build/test/data") + "/testContextURI";
|
||||||
private static final Path BASE = new Path(basePath);
|
private static final Path BASE = new Path(basePath);
|
||||||
|
|
||||||
|
// Matches anything containing <, >, :, ", |, ?, *, or anything that ends with
|
||||||
|
// space or dot.
|
||||||
|
private static final Pattern WIN_INVALID_FILE_NAME_PATTERN = Pattern.compile(
|
||||||
|
"^(.*?[<>\\:\"\\|\\?\\*].*?)|(.*?[ \\.])$");
|
||||||
|
|
||||||
protected FileContext fc1;
|
protected FileContext fc1;
|
||||||
protected FileContext fc2;
|
protected FileContext fc2;
|
||||||
|
|
||||||
|
@ -81,6 +89,10 @@ public void testCreateFile() throws IOException {
|
||||||
" ", "^ " };
|
" ", "^ " };
|
||||||
|
|
||||||
for (String f : fileNames) {
|
for (String f : fileNames) {
|
||||||
|
if (!isTestableFileNameOnPlatform(f)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
// Create a file on fc2's file system using fc1
|
// Create a file on fc2's file system using fc1
|
||||||
Path testPath = qualifiedPath(f, fc2);
|
Path testPath = qualifiedPath(f, fc2);
|
||||||
// Ensure file does not exist
|
// Ensure file does not exist
|
||||||
|
@ -205,6 +217,10 @@ public void testCreateDirectory() throws IOException {
|
||||||
"deleteTest/()&^%$#@!~_+}{><?", " ", "^ " };
|
"deleteTest/()&^%$#@!~_+}{><?", " ", "^ " };
|
||||||
|
|
||||||
for (String f : dirNames) {
|
for (String f : dirNames) {
|
||||||
|
if (!isTestableFileNameOnPlatform(f)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
// Create a file on fc2's file system using fc1
|
// Create a file on fc2's file system using fc1
|
||||||
Path testPath = qualifiedPath(f, fc2);
|
Path testPath = qualifiedPath(f, fc2);
|
||||||
// Ensure file does not exist
|
// Ensure file does not exist
|
||||||
|
@ -374,6 +390,10 @@ public void testDeleteDirectory() throws IOException {
|
||||||
"deleteTest/()&^%$#@!~_+}{><?", " ", "^ " };
|
"deleteTest/()&^%$#@!~_+}{><?", " ", "^ " };
|
||||||
|
|
||||||
for (String f : dirNames) {
|
for (String f : dirNames) {
|
||||||
|
if (!isTestableFileNameOnPlatform(f)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
// Create a file on fc2's file system using fc1
|
// Create a file on fc2's file system using fc1
|
||||||
Path testPath = qualifiedPath(f, fc2);
|
Path testPath = qualifiedPath(f, fc2);
|
||||||
// Ensure file does not exist
|
// Ensure file does not exist
|
||||||
|
@ -492,6 +512,10 @@ public void testListStatus() throws Exception {
|
||||||
ArrayList<Path> testDirs = new ArrayList<Path>();
|
ArrayList<Path> testDirs = new ArrayList<Path>();
|
||||||
|
|
||||||
for (String d : dirs) {
|
for (String d : dirs) {
|
||||||
|
if (!isTestableFileNameOnPlatform(d)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
testDirs.add(qualifiedPath(d, fc2));
|
testDirs.add(qualifiedPath(d, fc2));
|
||||||
}
|
}
|
||||||
Assert.assertFalse(exists(fc1, testDirs.get(0)));
|
Assert.assertFalse(exists(fc1, testDirs.get(0)));
|
||||||
|
@ -506,15 +530,17 @@ public void testListStatus() throws Exception {
|
||||||
Assert.assertEquals(qualifiedPath(hPrefix, fc1), paths[0].getPath());
|
Assert.assertEquals(qualifiedPath(hPrefix, fc1), paths[0].getPath());
|
||||||
|
|
||||||
paths = fc1.util().listStatus(qualifiedPath(hPrefix, fc1));
|
paths = fc1.util().listStatus(qualifiedPath(hPrefix, fc1));
|
||||||
Assert.assertEquals(6, paths.length);
|
Assert.assertEquals(testDirs.size(), paths.length);
|
||||||
for (int i = 0; i < dirs.length; i++) {
|
for (int i = 0; i < testDirs.size(); i++) {
|
||||||
boolean found = false;
|
boolean found = false;
|
||||||
for (int j = 0; j < paths.length; j++) {
|
for (int j = 0; j < paths.length; j++) {
|
||||||
if (qualifiedPath(dirs[i],fc1).equals(paths[j].getPath())) {
|
if (qualifiedPath(testDirs.get(i).toString(), fc1).equals(
|
||||||
|
paths[j].getPath())) {
|
||||||
|
|
||||||
found = true;
|
found = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Assert.assertTrue(dirs[i] + " not found", found);
|
Assert.assertTrue(testDirs.get(i) + " not found", found);
|
||||||
}
|
}
|
||||||
|
|
||||||
paths = fc1.util().listStatus(qualifiedPath(dirs[0], fc1));
|
paths = fc1.util().listStatus(qualifiedPath(dirs[0], fc1));
|
||||||
|
@ -539,9 +565,32 @@ public void testListStatus() throws Exception {
|
||||||
}
|
}
|
||||||
Assert.assertTrue(stat.getPath() + " not found", found);
|
Assert.assertTrue(stat.getPath() + " not found", found);
|
||||||
}
|
}
|
||||||
Assert.assertEquals(6, dirLen);
|
Assert.assertEquals(testDirs.size(), dirLen);
|
||||||
|
|
||||||
pathsItor = fc1.listStatus(qualifiedPath(dirs[0], fc1));
|
pathsItor = fc1.listStatus(qualifiedPath(dirs[0], fc1));
|
||||||
Assert.assertFalse(pathsItor.hasNext());
|
Assert.assertFalse(pathsItor.hasNext());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns true if the argument is a file name that is testable on the platform
|
||||||
|
* currently running the test. This is intended for use by tests so that they
|
||||||
|
* can skip checking file names that aren't supported by the underlying
|
||||||
|
* platform. The current implementation specifically checks for patterns that
|
||||||
|
* are not valid file names on Windows when the tests are running on Windows.
|
||||||
|
*
|
||||||
|
* @param fileName String file name to check
|
||||||
|
* @return boolean true if the argument is valid as a file name
|
||||||
|
*/
|
||||||
|
private static boolean isTestableFileNameOnPlatform(String fileName) {
|
||||||
|
boolean valid = true;
|
||||||
|
|
||||||
|
if (Shell.WINDOWS) {
|
||||||
|
// Disallow reserved characters: <, >, :, ", |, ?, *.
|
||||||
|
// Disallow trailing space or period.
|
||||||
|
// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
|
||||||
|
valid = !WIN_INVALID_FILE_NAME_PATTERN.matcher(fileName).matches();
|
||||||
|
}
|
||||||
|
|
||||||
|
return valid;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -86,7 +86,7 @@ static String getAbsoluteTestRootDir(FileSystem fSys)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
// NOTE: can't cache because of different filesystems!
|
// NOTE: can't cache because of different filesystems!
|
||||||
//if (absTestRootDir == null)
|
//if (absTestRootDir == null)
|
||||||
if (TEST_ROOT_DIR.startsWith("/")) {
|
if (new Path(TEST_ROOT_DIR).isAbsolute()) {
|
||||||
absTestRootDir = TEST_ROOT_DIR;
|
absTestRootDir = TEST_ROOT_DIR;
|
||||||
} else {
|
} else {
|
||||||
absTestRootDir = fSys.getWorkingDirectory().toString() + "/"
|
absTestRootDir = fSys.getWorkingDirectory().toString() + "/"
|
||||||
|
|
|
@ -43,13 +43,14 @@ public void setup() throws IOException {
|
||||||
fc = FileContext.getFileContext();
|
fc = FileContext.getFileContext();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFileContextResolveAfs() throws IOException {
|
public void testFileContextResolveAfs() throws IOException {
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
localFs = FileSystem.get(conf);
|
localFs = FileSystem.get(conf);
|
||||||
|
|
||||||
Path localPath = new Path(TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs1");
|
Path localPath = new Path(TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs1");
|
||||||
Path linkPath = new Path("file://" + TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs2");
|
Path linkPath = localFs.makeQualified(new Path(TEST_ROOT_DIR_LOCAL,
|
||||||
|
"TestFileContextResolveAfs2"));
|
||||||
localFs.mkdirs(new Path(TEST_ROOT_DIR_LOCAL));
|
localFs.mkdirs(new Path(TEST_ROOT_DIR_LOCAL));
|
||||||
localFs.create(localPath);
|
localFs.create(localPath);
|
||||||
|
|
||||||
|
|
|
@ -20,16 +20,24 @@
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import java.io.BufferedReader;
|
import java.io.BufferedReader;
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
|
import java.io.FileInputStream;
|
||||||
|
import java.io.FileOutputStream;
|
||||||
import java.io.FileReader;
|
import java.io.FileReader;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.PrintWriter;
|
import java.io.PrintWriter;
|
||||||
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.jar.Attributes;
|
||||||
|
import java.util.jar.JarFile;
|
||||||
|
import java.util.jar.Manifest;
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.junit.After;
|
import org.junit.After;
|
||||||
import org.junit.Assert;
|
import org.junit.Assert;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -121,7 +129,7 @@ private void createFile(File directory, String name, String contents)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testListFiles() throws IOException {
|
public void testListFiles() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
//Test existing files case
|
//Test existing files case
|
||||||
|
@ -148,7 +156,7 @@ public void testListFiles() throws IOException {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testListAPI() throws IOException {
|
public void testListAPI() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
//Test existing files case
|
//Test existing files case
|
||||||
|
@ -196,7 +204,7 @@ private void cleanupImpl() throws IOException {
|
||||||
Assert.assertTrue(!partitioned.exists());
|
Assert.assertTrue(!partitioned.exists());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFullyDelete() throws IOException {
|
public void testFullyDelete() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
boolean ret = FileUtil.fullyDelete(del);
|
boolean ret = FileUtil.fullyDelete(del);
|
||||||
|
@ -211,7 +219,7 @@ public void testFullyDelete() throws IOException {
|
||||||
* (b) symlink to dir only and not the dir pointed to by symlink.
|
* (b) symlink to dir only and not the dir pointed to by symlink.
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFullyDeleteSymlinks() throws IOException {
|
public void testFullyDeleteSymlinks() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
|
|
||||||
|
@ -241,7 +249,7 @@ public void testFullyDeleteSymlinks() throws IOException {
|
||||||
* (b) dangling symlink to directory properly
|
* (b) dangling symlink to directory properly
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFullyDeleteDanglingSymlinks() throws IOException {
|
public void testFullyDeleteDanglingSymlinks() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
// delete the directory tmp to make tmpDir a dangling link to dir tmp and
|
// delete the directory tmp to make tmpDir a dangling link to dir tmp and
|
||||||
|
@ -268,7 +276,7 @@ public void testFullyDeleteDanglingSymlinks() throws IOException {
|
||||||
Assert.assertEquals(3, del.list().length);
|
Assert.assertEquals(3, del.list().length);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFullyDeleteContents() throws IOException {
|
public void testFullyDeleteContents() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
boolean ret = FileUtil.fullyDeleteContents(del);
|
boolean ret = FileUtil.fullyDeleteContents(del);
|
||||||
|
@ -384,15 +392,19 @@ private void validateAndSetWritablePermissions(
|
||||||
zlink.exists());
|
zlink.exists());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFailFullyDelete() throws IOException {
|
public void testFailFullyDelete() throws IOException {
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
// windows Dir.setWritable(false) does not work for directories
|
||||||
|
return;
|
||||||
|
}
|
||||||
LOG.info("Running test to verify failure of fullyDelete()");
|
LOG.info("Running test to verify failure of fullyDelete()");
|
||||||
setupDirsAndNonWritablePermissions();
|
setupDirsAndNonWritablePermissions();
|
||||||
boolean ret = FileUtil.fullyDelete(new MyFile(del));
|
boolean ret = FileUtil.fullyDelete(new MyFile(del));
|
||||||
validateAndSetWritablePermissions(true, ret);
|
validateAndSetWritablePermissions(true, ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFailFullyDeleteGrantPermissions() throws IOException {
|
public void testFailFullyDeleteGrantPermissions() throws IOException {
|
||||||
setupDirsAndNonWritablePermissions();
|
setupDirsAndNonWritablePermissions();
|
||||||
boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
|
boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
|
||||||
|
@ -461,15 +473,19 @@ public File[] listFiles() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFailFullyDeleteContents() throws IOException {
|
public void testFailFullyDeleteContents() throws IOException {
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
// windows Dir.setWritable(false) does not work for directories
|
||||||
|
return;
|
||||||
|
}
|
||||||
LOG.info("Running test to verify failure of fullyDeleteContents()");
|
LOG.info("Running test to verify failure of fullyDeleteContents()");
|
||||||
setupDirsAndNonWritablePermissions();
|
setupDirsAndNonWritablePermissions();
|
||||||
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
|
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
|
||||||
validateAndSetWritablePermissions(true, ret);
|
validateAndSetWritablePermissions(true, ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
|
public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
|
||||||
setupDirsAndNonWritablePermissions();
|
setupDirsAndNonWritablePermissions();
|
||||||
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
|
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
|
||||||
|
@ -477,7 +493,7 @@ public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
|
||||||
validateAndSetWritablePermissions(false, ret);
|
validateAndSetWritablePermissions(false, ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCopyMergeSingleDirectory() throws IOException {
|
public void testCopyMergeSingleDirectory() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
|
boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
|
||||||
|
@ -536,7 +552,7 @@ private boolean copyMerge(String src, String dst)
|
||||||
* and that directory sizes are not added to the final calculated size
|
* and that directory sizes are not added to the final calculated size
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetDU() throws IOException {
|
public void testGetDU() throws IOException {
|
||||||
setupDirs();
|
setupDirs();
|
||||||
|
|
||||||
|
@ -546,4 +562,69 @@ public void testGetDU() throws IOException {
|
||||||
long expected = 2 * (3 + System.getProperty("line.separator").length());
|
long expected = 2 * (3 + System.getProperty("line.separator").length());
|
||||||
Assert.assertEquals(expected, du);
|
Assert.assertEquals(expected, du);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testCreateJarWithClassPath() throws Exception {
|
||||||
|
// setup test directory for files
|
||||||
|
Assert.assertFalse(tmp.exists());
|
||||||
|
Assert.assertTrue(tmp.mkdirs());
|
||||||
|
|
||||||
|
// create files expected to match a wildcard
|
||||||
|
List<File> wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"),
|
||||||
|
new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"),
|
||||||
|
new File(tmp, "wildcard4.JAR"));
|
||||||
|
for (File wildcardMatch: wildcardMatches) {
|
||||||
|
Assert.assertTrue("failure creating file: " + wildcardMatch,
|
||||||
|
wildcardMatch.createNewFile());
|
||||||
|
}
|
||||||
|
|
||||||
|
// create non-jar files, which we expect to not be included in the classpath
|
||||||
|
Assert.assertTrue(new File(tmp, "text.txt").createNewFile());
|
||||||
|
Assert.assertTrue(new File(tmp, "executable.exe").createNewFile());
|
||||||
|
Assert.assertTrue(new File(tmp, "README").createNewFile());
|
||||||
|
|
||||||
|
// create classpath jar
|
||||||
|
String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
|
||||||
|
List<String> classPaths = Arrays.asList("cp1.jar", "cp2.jar", wildcardPath,
|
||||||
|
"cp3.jar");
|
||||||
|
String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
|
||||||
|
String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
|
||||||
|
new Path(tmp.getCanonicalPath()));
|
||||||
|
|
||||||
|
// verify classpath by reading manifest from jar file
|
||||||
|
JarFile jarFile = null;
|
||||||
|
try {
|
||||||
|
jarFile = new JarFile(classPathJar);
|
||||||
|
Manifest jarManifest = jarFile.getManifest();
|
||||||
|
Assert.assertNotNull(jarManifest);
|
||||||
|
Attributes mainAttributes = jarManifest.getMainAttributes();
|
||||||
|
Assert.assertNotNull(mainAttributes);
|
||||||
|
Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH));
|
||||||
|
String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH);
|
||||||
|
Assert.assertNotNull(classPathAttr);
|
||||||
|
List<String> expectedClassPaths = new ArrayList<String>();
|
||||||
|
for (String classPath: classPaths) {
|
||||||
|
if (!wildcardPath.equals(classPath)) {
|
||||||
|
expectedClassPaths.add(new File(classPath).toURI().toURL()
|
||||||
|
.toExternalForm());
|
||||||
|
} else {
|
||||||
|
// add wildcard matches
|
||||||
|
for (File wildcardMatch: wildcardMatches) {
|
||||||
|
expectedClassPaths.add(wildcardMatch.toURI().toURL()
|
||||||
|
.toExternalForm());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
|
||||||
|
Assert.assertEquals(expectedClassPaths, actualClassPaths);
|
||||||
|
} finally {
|
||||||
|
if (jarFile != null) {
|
||||||
|
try {
|
||||||
|
jarFile.close();
|
||||||
|
} catch (IOException e) {
|
||||||
|
LOG.warn("exception closing jarFile: " + classPathJar, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -117,20 +117,22 @@ private void change(int exit, String owner, String group, String...files)
|
||||||
*
|
*
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testChmod() throws Exception {
|
public void testChmod() throws Exception {
|
||||||
|
Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists");
|
||||||
|
|
||||||
final String f1 = TEST_ROOT_DIR + "/" + "testChmod/fileExists";
|
final String f1 = p1.toUri().getPath();
|
||||||
final String f2 = TEST_ROOT_DIR + "/" + "testChmod/fileDoesNotExist";
|
final String f2 = new Path(TEST_ROOT_DIR, "testChmod/fileDoesNotExist")
|
||||||
final String f3 = TEST_ROOT_DIR + "/" + "testChmod/nonExistingfiles*";
|
.toUri().getPath();
|
||||||
|
final String f3 = new Path(TEST_ROOT_DIR, "testChmod/nonExistingfiles*")
|
||||||
|
.toUri().getPath();
|
||||||
|
|
||||||
Path p1 = new Path(f1);
|
final Path p4 = new Path(TEST_ROOT_DIR, "testChmod/file1");
|
||||||
|
final Path p5 = new Path(TEST_ROOT_DIR, "testChmod/file2");
|
||||||
|
final Path p6 = new Path(TEST_ROOT_DIR, "testChmod/file3");
|
||||||
|
|
||||||
final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file1");
|
final String f7 = new Path(TEST_ROOT_DIR, "testChmod/file*").toUri()
|
||||||
final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file2");
|
.getPath();
|
||||||
final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file3");
|
|
||||||
|
|
||||||
final String f7 = TEST_ROOT_DIR + "/" + "testChmod/file*";
|
|
||||||
|
|
||||||
// create and write test file
|
// create and write test file
|
||||||
writeFile(fileSys, p1);
|
writeFile(fileSys, p1);
|
||||||
|
@ -171,20 +173,23 @@ public void testChmod() throws Exception {
|
||||||
*
|
*
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testChown() throws Exception {
|
public void testChown() throws Exception {
|
||||||
|
Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists");
|
||||||
|
|
||||||
final String f1 = TEST_ROOT_DIR + "/" + "testChown/fileExists";
|
final String f1 = p1.toUri().getPath();
|
||||||
final String f2 = TEST_ROOT_DIR + "/" + "testChown/fileDoesNotExist";
|
final String f2 = new Path(TEST_ROOT_DIR, "testChown/fileDoesNotExist")
|
||||||
final String f3 = TEST_ROOT_DIR + "/" + "testChown/nonExistingfiles*";
|
.toUri().getPath();
|
||||||
|
final String f3 = new Path(TEST_ROOT_DIR, "testChown/nonExistingfiles*")
|
||||||
|
.toUri().getPath();
|
||||||
|
|
||||||
Path p1 = new Path(f1);
|
|
||||||
|
|
||||||
final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChown/file1");
|
final Path p4 = new Path(TEST_ROOT_DIR, "testChown/file1");
|
||||||
final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChown/file2");
|
final Path p5 = new Path(TEST_ROOT_DIR, "testChown/file2");
|
||||||
final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChown/file3");
|
final Path p6 = new Path(TEST_ROOT_DIR, "testChown/file3");
|
||||||
|
|
||||||
final String f7 = TEST_ROOT_DIR + "/" + "testChown/file*";
|
final String f7 = new Path(TEST_ROOT_DIR, "testChown/file*").toUri()
|
||||||
|
.getPath();
|
||||||
|
|
||||||
// create and write test file
|
// create and write test file
|
||||||
writeFile(fileSys, p1);
|
writeFile(fileSys, p1);
|
||||||
|
@ -224,20 +229,22 @@ public void testChown() throws Exception {
|
||||||
*
|
*
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testChgrp() throws Exception {
|
public void testChgrp() throws Exception {
|
||||||
|
Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists");
|
||||||
|
|
||||||
final String f1 = TEST_ROOT_DIR + "/" + "testChgrp/fileExists";
|
final String f1 = p1.toUri().getPath();
|
||||||
final String f2 = TEST_ROOT_DIR + "/" + "testChgrp/fileDoesNotExist";
|
final String f2 = new Path(TEST_ROOT_DIR, "testChgrp/fileDoesNotExist")
|
||||||
final String f3 = TEST_ROOT_DIR + "/" + "testChgrp/nonExistingfiles*";
|
.toUri().getPath();
|
||||||
|
final String f3 = new Path(TEST_ROOT_DIR, "testChgrp/nonExistingfiles*")
|
||||||
|
.toUri().getPath();
|
||||||
|
|
||||||
Path p1 = new Path(f1);
|
final Path p4 = new Path(TEST_ROOT_DIR, "testChgrp/file1");
|
||||||
|
final Path p5 = new Path(TEST_ROOT_DIR, "testChgrp/file2");
|
||||||
|
final Path p6 = new Path(TEST_ROOT_DIR, "testChgrp/file3");
|
||||||
|
|
||||||
final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file1");
|
final String f7 = new Path(TEST_ROOT_DIR, "testChgrp/file*").toUri()
|
||||||
final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file2");
|
.getPath();
|
||||||
final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file3");
|
|
||||||
|
|
||||||
final String f7 = TEST_ROOT_DIR + "/" + "testChgrp/file*";
|
|
||||||
|
|
||||||
// create and write test file
|
// create and write test file
|
||||||
writeFile(fileSys, p1);
|
writeFile(fileSys, p1);
|
||||||
|
@ -267,7 +274,7 @@ public void testChgrp() throws Exception {
|
||||||
change(1, null, "admin", f2, f7);
|
change(1, null, "admin", f2, f7);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
|
public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
|
||||||
throws Exception {
|
throws Exception {
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
|
@ -284,8 +291,8 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
|
||||||
fileSys.mkdirs(tdir);
|
fileSys.mkdirs(tdir);
|
||||||
String[] args = new String[3];
|
String[] args = new String[3];
|
||||||
args[0] = "-get";
|
args[0] = "-get";
|
||||||
args[1] = tdir+"/invalidSrc";
|
args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString();
|
||||||
args[2] = tdir+"/invalidDst";
|
args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString();
|
||||||
assertTrue("file exists", !fileSys.exists(new Path(args[1])));
|
assertTrue("file exists", !fileSys.exists(new Path(args[1])));
|
||||||
assertTrue("file exists", !fileSys.exists(new Path(args[2])));
|
assertTrue("file exists", !fileSys.exists(new Path(args[2])));
|
||||||
int run = shell.run(args);
|
int run = shell.run(args);
|
||||||
|
@ -299,7 +306,7 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRmWithNonexistentGlob() throws Exception {
|
public void testRmWithNonexistentGlob() throws Exception {
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
FsShell shell = new FsShell();
|
FsShell shell = new FsShell();
|
||||||
|
@ -320,7 +327,7 @@ public void testRmWithNonexistentGlob() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRmForceWithNonexistentGlob() throws Exception {
|
public void testRmForceWithNonexistentGlob() throws Exception {
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
FsShell shell = new FsShell();
|
FsShell shell = new FsShell();
|
||||||
|
@ -339,7 +346,7 @@ public void testRmForceWithNonexistentGlob() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testInvalidDefaultFS() throws Exception {
|
public void testInvalidDefaultFS() throws Exception {
|
||||||
// if default fs doesn't exist or is invalid, but the path provided in
|
// if default fs doesn't exist or is invalid, but the path provided in
|
||||||
// arguments is valid - fsshell should work
|
// arguments is valid - fsshell should work
|
||||||
|
|
|
@ -54,17 +54,6 @@
|
||||||
* NOTICE: This test class only tests the functionality of the OS
|
* NOTICE: This test class only tests the functionality of the OS
|
||||||
* upon which the test is run! (although you're pretty safe with the
|
* upon which the test is run! (although you're pretty safe with the
|
||||||
* unix-like OS's, unless a typo sneaks in.)
|
* unix-like OS's, unless a typo sneaks in.)
|
||||||
*
|
|
||||||
* Notes about Windows testing:
|
|
||||||
* (a) In order to create hardlinks, the process must be run with
|
|
||||||
* administrative privs, in both the account AND the invocation.
|
|
||||||
* For instance, to run within Eclipse, the Eclipse application must be
|
|
||||||
* launched by right-clicking on it, and selecting "Run as Administrator"
|
|
||||||
* (and that option will only be available if the current user id does
|
|
||||||
* in fact have admin privs).
|
|
||||||
* (b) The getLinkCount() test case will fail for Windows, unless Cygwin
|
|
||||||
* is set up properly. In particular, ${cygwin}/bin must be in
|
|
||||||
* the PATH environment variable, so the cygwin utilities can be found.
|
|
||||||
*/
|
*/
|
||||||
public class TestHardLink {
|
public class TestHardLink {
|
||||||
|
|
||||||
|
@ -221,9 +210,6 @@ private String fetchFileContents(File file)
|
||||||
* Sanity check the simplest case of HardLink.getLinkCount()
|
* Sanity check the simplest case of HardLink.getLinkCount()
|
||||||
* to make sure we get back "1" for ordinary single-linked files.
|
* to make sure we get back "1" for ordinary single-linked files.
|
||||||
* Tests with multiply-linked files are in later test cases.
|
* Tests with multiply-linked files are in later test cases.
|
||||||
*
|
|
||||||
* If this fails on Windows but passes on Unix, the most likely cause is
|
|
||||||
* incorrect configuration of the Cygwin installation; see above.
|
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testGetLinkCount() throws IOException {
|
public void testGetLinkCount() throws IOException {
|
||||||
|
@ -412,7 +398,7 @@ class win extends HardLinkCGWin {};
|
||||||
assertEquals(5, win.hardLinkCommand.length);
|
assertEquals(5, win.hardLinkCommand.length);
|
||||||
assertEquals(7, win.hardLinkMultPrefix.length);
|
assertEquals(7, win.hardLinkMultPrefix.length);
|
||||||
assertEquals(8, win.hardLinkMultSuffix.length);
|
assertEquals(8, win.hardLinkMultSuffix.length);
|
||||||
assertEquals(3, win.getLinkCountCommand.length);
|
assertEquals(4, win.getLinkCountCommand.length);
|
||||||
|
|
||||||
assertTrue(win.hardLinkMultPrefix[4].equals("%f"));
|
assertTrue(win.hardLinkMultPrefix[4].equals("%f"));
|
||||||
//make sure "%f" was not munged
|
//make sure "%f" was not munged
|
||||||
|
@ -423,7 +409,7 @@ class win extends HardLinkCGWin {};
|
||||||
assertTrue(win.hardLinkMultSuffix[7].equals("1>NUL"));
|
assertTrue(win.hardLinkMultSuffix[7].equals("1>NUL"));
|
||||||
//make sure "1>NUL" was not munged
|
//make sure "1>NUL" was not munged
|
||||||
assertEquals(5, ("1>NUL").length());
|
assertEquals(5, ("1>NUL").length());
|
||||||
assertTrue(win.getLinkCountCommand[1].equals("-c%h"));
|
assertTrue(win.getLinkCountCommand[1].equals("hardlink"));
|
||||||
//make sure "-c%h" was not munged
|
//make sure "-c%h" was not munged
|
||||||
assertEquals(4, ("-c%h").length());
|
assertEquals(4, ("-c%h").length());
|
||||||
}
|
}
|
||||||
|
|
|
@ -129,7 +129,7 @@ private String buildBufferDir(String dir, int i) {
|
||||||
* The second dir exists & is RW
|
* The second dir exists & is RW
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void test0() throws Exception {
|
public void test0() throws Exception {
|
||||||
if (isWindows) return;
|
if (isWindows) return;
|
||||||
String dir0 = buildBufferDir(ROOT, 0);
|
String dir0 = buildBufferDir(ROOT, 0);
|
||||||
|
@ -141,7 +141,8 @@ public void test0() throws Exception {
|
||||||
validateTempDirCreation(dir1);
|
validateTempDirCreation(dir1);
|
||||||
validateTempDirCreation(dir1);
|
validateTempDirCreation(dir1);
|
||||||
} finally {
|
} finally {
|
||||||
Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
|
Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
|
||||||
|
BUFFER_DIR_ROOT));
|
||||||
rmBufferDirs();
|
rmBufferDirs();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -150,7 +151,7 @@ public void test0() throws Exception {
|
||||||
* The second dir exists & is RW
|
* The second dir exists & is RW
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testROBufferDirAndRWBufferDir() throws Exception {
|
public void testROBufferDirAndRWBufferDir() throws Exception {
|
||||||
if (isWindows) return;
|
if (isWindows) return;
|
||||||
String dir1 = buildBufferDir(ROOT, 1);
|
String dir1 = buildBufferDir(ROOT, 1);
|
||||||
|
@ -162,14 +163,15 @@ public void testROBufferDirAndRWBufferDir() throws Exception {
|
||||||
validateTempDirCreation(dir2);
|
validateTempDirCreation(dir2);
|
||||||
validateTempDirCreation(dir2);
|
validateTempDirCreation(dir2);
|
||||||
} finally {
|
} finally {
|
||||||
Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
|
Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
|
||||||
|
BUFFER_DIR_ROOT));
|
||||||
rmBufferDirs();
|
rmBufferDirs();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/** Two buffer dirs. Both do not exist but on a RW disk.
|
/** Two buffer dirs. Both do not exist but on a RW disk.
|
||||||
* Check if tmp dirs are allocated in a round-robin
|
* Check if tmp dirs are allocated in a round-robin
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testDirsNotExist() throws Exception {
|
public void testDirsNotExist() throws Exception {
|
||||||
if (isWindows) return;
|
if (isWindows) return;
|
||||||
String dir2 = buildBufferDir(ROOT, 2);
|
String dir2 = buildBufferDir(ROOT, 2);
|
||||||
|
@ -195,7 +197,7 @@ public void testDirsNotExist() throws Exception {
|
||||||
* Later disk1 becomes read-only.
|
* Later disk1 becomes read-only.
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRWBufferDirBecomesRO() throws Exception {
|
public void testRWBufferDirBecomesRO() throws Exception {
|
||||||
if (isWindows) return;
|
if (isWindows) return;
|
||||||
String dir3 = buildBufferDir(ROOT, 3);
|
String dir3 = buildBufferDir(ROOT, 3);
|
||||||
|
@ -233,7 +235,7 @@ public void testRWBufferDirBecomesRO() throws Exception {
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
static final int TRIALS = 100;
|
static final int TRIALS = 100;
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCreateManyFiles() throws Exception {
|
public void testCreateManyFiles() throws Exception {
|
||||||
if (isWindows) return;
|
if (isWindows) return;
|
||||||
String dir5 = buildBufferDir(ROOT, 5);
|
String dir5 = buildBufferDir(ROOT, 5);
|
||||||
|
@ -270,7 +272,7 @@ public void testCreateManyFiles() throws Exception {
|
||||||
* directory. With checkAccess true, the directory should not be created.
|
* directory. With checkAccess true, the directory should not be created.
|
||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testLocalPathForWriteDirCreation() throws IOException {
|
public void testLocalPathForWriteDirCreation() throws IOException {
|
||||||
String dir0 = buildBufferDir(ROOT, 0);
|
String dir0 = buildBufferDir(ROOT, 0);
|
||||||
String dir1 = buildBufferDir(ROOT, 1);
|
String dir1 = buildBufferDir(ROOT, 1);
|
||||||
|
@ -291,7 +293,8 @@ public void testLocalPathForWriteDirCreation() throws IOException {
|
||||||
assertEquals(e.getClass(), FileNotFoundException.class);
|
assertEquals(e.getClass(), FileNotFoundException.class);
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
|
Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
|
||||||
|
BUFFER_DIR_ROOT));
|
||||||
rmBufferDirs();
|
rmBufferDirs();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -302,7 +305,7 @@ public void testLocalPathForWriteDirCreation() throws IOException {
|
||||||
* are mistakenly created from fully qualified path strings.
|
* are mistakenly created from fully qualified path strings.
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testNoSideEffects() throws IOException {
|
public void testNoSideEffects() throws IOException {
|
||||||
assumeTrue(!isWindows);
|
assumeTrue(!isWindows);
|
||||||
String dir = buildBufferDir(ROOT, 0);
|
String dir = buildBufferDir(ROOT, 0);
|
||||||
|
@ -313,7 +316,8 @@ public void testNoSideEffects() throws IOException {
|
||||||
assertTrue(result.getParentFile().delete());
|
assertTrue(result.getParentFile().delete());
|
||||||
assertFalse(new File(dir).exists());
|
assertFalse(new File(dir).exists());
|
||||||
} finally {
|
} finally {
|
||||||
Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
|
Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
|
||||||
|
BUFFER_DIR_ROOT));
|
||||||
rmBufferDirs();
|
rmBufferDirs();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -323,7 +327,7 @@ public void testNoSideEffects() throws IOException {
|
||||||
*
|
*
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetLocalPathToRead() throws IOException {
|
public void testGetLocalPathToRead() throws IOException {
|
||||||
assumeTrue(!isWindows);
|
assumeTrue(!isWindows);
|
||||||
String dir = buildBufferDir(ROOT, 0);
|
String dir = buildBufferDir(ROOT, 0);
|
||||||
|
@ -336,7 +340,8 @@ public void testGetLocalPathToRead() throws IOException {
|
||||||
assertEquals(f1.getName(), p1.getName());
|
assertEquals(f1.getName(), p1.getName());
|
||||||
assertEquals("file", p1.getFileSystem(conf).getUri().getScheme());
|
assertEquals("file", p1.getFileSystem(conf).getUri().getScheme());
|
||||||
} finally {
|
} finally {
|
||||||
Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
|
Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
|
||||||
|
BUFFER_DIR_ROOT));
|
||||||
rmBufferDirs();
|
rmBufferDirs();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -347,7 +352,7 @@ public void testGetLocalPathToRead() throws IOException {
|
||||||
*
|
*
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetAllLocalPathsToRead() throws IOException {
|
public void testGetAllLocalPathsToRead() throws IOException {
|
||||||
assumeTrue(!isWindows);
|
assumeTrue(!isWindows);
|
||||||
|
|
||||||
|
@ -395,7 +400,7 @@ public void testGetAllLocalPathsToRead() throws IOException {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRemoveContext() throws IOException {
|
public void testRemoveContext() throws IOException {
|
||||||
String dir = buildBufferDir(ROOT, 0);
|
String dir = buildBufferDir(ROOT, 0);
|
||||||
try {
|
try {
|
||||||
|
|
|
@ -17,6 +17,7 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package org.apache.hadoop.fs;
|
package org.apache.hadoop.fs;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
|
@ -25,10 +26,14 @@
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.io.AvroTestUtil;
|
import org.apache.hadoop.io.AvroTestUtil;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
import junit.framework.TestCase;
|
||||||
|
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
public class TestPath extends TestCase {
|
public class TestPath extends TestCase {
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testToString() {
|
public void testToString() {
|
||||||
toStringTest("/");
|
toStringTest("/");
|
||||||
toStringTest("/foo");
|
toStringTest("/foo");
|
||||||
|
@ -61,6 +66,7 @@ private void toStringTest(String pathString) {
|
||||||
assertEquals(pathString, new Path(pathString).toString());
|
assertEquals(pathString, new Path(pathString).toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testNormalize() throws URISyntaxException {
|
public void testNormalize() throws URISyntaxException {
|
||||||
assertEquals("", new Path(".").toString());
|
assertEquals("", new Path(".").toString());
|
||||||
assertEquals("..", new Path("..").toString());
|
assertEquals("..", new Path("..").toString());
|
||||||
|
@ -82,6 +88,7 @@ public void testNormalize() throws URISyntaxException {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testIsAbsolute() {
|
public void testIsAbsolute() {
|
||||||
assertTrue(new Path("/").isAbsolute());
|
assertTrue(new Path("/").isAbsolute());
|
||||||
assertTrue(new Path("/foo").isAbsolute());
|
assertTrue(new Path("/foo").isAbsolute());
|
||||||
|
@ -94,6 +101,7 @@ public void testIsAbsolute() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testParent() {
|
public void testParent() {
|
||||||
assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
|
assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
|
||||||
assertEquals(new Path("foo"), new Path("foo/bar").getParent());
|
assertEquals(new Path("foo"), new Path("foo/bar").getParent());
|
||||||
|
@ -104,6 +112,7 @@ public void testParent() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testChild() {
|
public void testChild() {
|
||||||
assertEquals(new Path("."), new Path(".", "."));
|
assertEquals(new Path("."), new Path(".", "."));
|
||||||
assertEquals(new Path("/"), new Path("/", "."));
|
assertEquals(new Path("/"), new Path("/", "."));
|
||||||
|
@ -123,10 +132,12 @@ public void testChild() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testEquals() {
|
public void testEquals() {
|
||||||
assertFalse(new Path("/").equals(new Path("/foo")));
|
assertFalse(new Path("/").equals(new Path("/foo")));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testDots() {
|
public void testDots() {
|
||||||
// Test Path(String)
|
// Test Path(String)
|
||||||
assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz");
|
assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz");
|
||||||
|
@ -164,18 +175,54 @@ public void testDots() {
|
||||||
assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
|
assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Test that Windows paths are correctly handled */
|
||||||
|
@Test (timeout = 5000)
|
||||||
|
public void testWindowsPaths() throws URISyntaxException, IOException {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
|
||||||
|
assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
|
||||||
|
assertEquals(new Path("/c:/foo/bar").toString(), "c:/foo/bar");
|
||||||
|
assertEquals(new Path("file://c:/foo/bar").toString(), "file://c:/foo/bar");
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Test invalid paths on Windows are correctly rejected */
|
||||||
|
@Test (timeout = 5000)
|
||||||
|
public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
String [] invalidPaths = {
|
||||||
|
"hdfs:\\\\\\tmp"
|
||||||
|
};
|
||||||
|
|
||||||
|
for (String path : invalidPaths) {
|
||||||
|
try {
|
||||||
|
Path item = new Path(path);
|
||||||
|
fail("Did not throw for invalid path " + path);
|
||||||
|
} catch (IllegalArgumentException iae) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/** Test Path objects created from other Path objects */
|
/** Test Path objects created from other Path objects */
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testChildParentResolution() throws URISyntaxException, IOException {
|
public void testChildParentResolution() throws URISyntaxException, IOException {
|
||||||
Path parent = new Path("foo1://bar1/baz1");
|
Path parent = new Path("foo1://bar1/baz1");
|
||||||
Path child = new Path("foo2://bar2/baz2");
|
Path child = new Path("foo2://bar2/baz2");
|
||||||
assertEquals(child, new Path(parent, child));
|
assertEquals(child, new Path(parent, child));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testScheme() throws java.io.IOException {
|
public void testScheme() throws java.io.IOException {
|
||||||
assertEquals("foo:/bar", new Path("foo:/","/bar").toString());
|
assertEquals("foo:/bar", new Path("foo:/","/bar").toString());
|
||||||
assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString());
|
assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testURI() throws URISyntaxException, IOException {
|
public void testURI() throws URISyntaxException, IOException {
|
||||||
URI uri = new URI("file:///bar#baz");
|
URI uri = new URI("file:///bar#baz");
|
||||||
Path path = new Path(uri);
|
Path path = new Path(uri);
|
||||||
|
@ -198,6 +245,7 @@ public void testURI() throws URISyntaxException, IOException {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Test URIs created from Path objects */
|
/** Test URIs created from Path objects */
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testPathToUriConversion() throws URISyntaxException, IOException {
|
public void testPathToUriConversion() throws URISyntaxException, IOException {
|
||||||
// Path differs from URI in that it ignores the query part..
|
// Path differs from URI in that it ignores the query part..
|
||||||
assertEquals(new URI(null, null, "/foo?bar", null, null), new Path("/foo?bar").toUri());
|
assertEquals(new URI(null, null, "/foo?bar", null, null), new Path("/foo?bar").toUri());
|
||||||
|
@ -218,6 +266,7 @@ public void testPathToUriConversion() throws URISyntaxException, IOException {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Test reserved characters in URIs (and therefore Paths) */
|
/** Test reserved characters in URIs (and therefore Paths) */
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testReservedCharacters() throws URISyntaxException, IOException {
|
public void testReservedCharacters() throws URISyntaxException, IOException {
|
||||||
// URI encodes the path
|
// URI encodes the path
|
||||||
assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
|
assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
|
||||||
|
@ -239,6 +288,7 @@ public void testReservedCharacters() throws URISyntaxException, IOException {
|
||||||
assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
|
assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testMakeQualified() throws URISyntaxException {
|
public void testMakeQualified() throws URISyntaxException {
|
||||||
URI defaultUri = new URI("hdfs://host1/dir1");
|
URI defaultUri = new URI("hdfs://host1/dir1");
|
||||||
URI wd = new URI("hdfs://host2/dir2");
|
URI wd = new URI("hdfs://host2/dir2");
|
||||||
|
@ -252,6 +302,7 @@ public void testMakeQualified() throws URISyntaxException {
|
||||||
new Path("file").makeQualified(defaultUri, new Path(wd)));
|
new Path("file").makeQualified(defaultUri, new Path(wd)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testGetName() {
|
public void testGetName() {
|
||||||
assertEquals("", new Path("/").getName());
|
assertEquals("", new Path("/").getName());
|
||||||
assertEquals("foo", new Path("foo").getName());
|
assertEquals("foo", new Path("foo").getName());
|
||||||
|
@ -261,13 +312,17 @@ public void testGetName() {
|
||||||
assertEquals("bar", new Path("hdfs://host/foo/bar").getName());
|
assertEquals("bar", new Path("hdfs://host/foo/bar").getName());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testAvroReflect() throws Exception {
|
public void testAvroReflect() throws Exception {
|
||||||
AvroTestUtil.testReflect
|
AvroTestUtil.testReflect
|
||||||
(new Path("foo"),
|
(new Path("foo"),
|
||||||
"{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
|
"{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testGlobEscapeStatus() throws Exception {
|
public void testGlobEscapeStatus() throws Exception {
|
||||||
|
// This test is not meaningful on Windows where * is disallowed in file name.
|
||||||
|
if (Shell.WINDOWS) return;
|
||||||
FileSystem lfs = FileSystem.getLocal(new Configuration());
|
FileSystem lfs = FileSystem.getLocal(new Configuration());
|
||||||
Path testRoot = lfs.makeQualified(new Path(
|
Path testRoot = lfs.makeQualified(new Path(
|
||||||
System.getProperty("test.build.data","test/build/data"),
|
System.getProperty("test.build.data","test/build/data"),
|
||||||
|
@ -324,4 +379,31 @@ public void testGlobEscapeStatus() throws Exception {
|
||||||
assertEquals(1, stats.length);
|
assertEquals(1, stats.length);
|
||||||
assertEquals(new Path(testRoot, "*/f"), stats[0].getPath());
|
assertEquals(new Path(testRoot, "*/f"), stats[0].getPath());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testMergePaths() {
|
||||||
|
assertEquals(new Path("/foo/bar"),
|
||||||
|
Path.mergePaths(new Path("/foo"),
|
||||||
|
new Path("/bar")));
|
||||||
|
|
||||||
|
assertEquals(new Path("/foo/bar/baz"),
|
||||||
|
Path.mergePaths(new Path("/foo/bar"),
|
||||||
|
new Path("/baz")));
|
||||||
|
|
||||||
|
assertEquals(new Path("/foo/bar/baz"),
|
||||||
|
Path.mergePaths(new Path("/foo"),
|
||||||
|
new Path("/bar/baz")));
|
||||||
|
|
||||||
|
assertEquals(new Path(Shell.WINDOWS ? "/C:/foo/bar" : "/C:/foo/C:/bar"),
|
||||||
|
Path.mergePaths(new Path("/C:/foo"),
|
||||||
|
new Path("/C:/bar")));
|
||||||
|
|
||||||
|
assertEquals(new Path("viewfs:///foo/bar"),
|
||||||
|
Path.mergePaths(new Path("viewfs:///foo"),
|
||||||
|
new Path("file:///bar")));
|
||||||
|
|
||||||
|
assertEquals(new Path("viewfs://vfsauthority/foo/bar"),
|
||||||
|
Path.mergePaths(new Path("viewfs://vfsauthority/foo"),
|
||||||
|
new Path("file://fileauthority/bar")));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -55,7 +55,7 @@ protected static Path mkdir(FileSystem fs, Path p) throws IOException {
|
||||||
// check that the specified file is in Trash
|
// check that the specified file is in Trash
|
||||||
protected static void checkTrash(FileSystem trashFs, Path trashRoot,
|
protected static void checkTrash(FileSystem trashFs, Path trashRoot,
|
||||||
Path path) throws IOException {
|
Path path) throws IOException {
|
||||||
Path p = new Path(trashRoot+"/"+ path.toUri().getPath());
|
Path p = Path.mergePaths(trashRoot, path);
|
||||||
assertTrue("Could not find file in trash: "+ p , trashFs.exists(p));
|
assertTrue("Could not find file in trash: "+ p , trashFs.exists(p));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -399,7 +399,8 @@ public static void trashShell(final Configuration conf, final Path base,
|
||||||
assertTrue(val==0);
|
assertTrue(val==0);
|
||||||
}
|
}
|
||||||
// current trash directory
|
// current trash directory
|
||||||
Path trashDir = new Path(trashRoot.toUri().getPath() + myFile.getParent().toUri().getPath());
|
Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()),
|
||||||
|
new Path(myFile.getParent().toUri().getPath()));
|
||||||
|
|
||||||
System.out.println("Deleting same myFile: myFile.parent=" + myFile.getParent().toUri().getPath() +
|
System.out.println("Deleting same myFile: myFile.parent=" + myFile.getParent().toUri().getPath() +
|
||||||
"; trashroot="+trashRoot.toUri().getPath() +
|
"; trashroot="+trashRoot.toUri().getPath() +
|
||||||
|
|
|
@ -19,8 +19,10 @@
|
||||||
|
|
||||||
import static org.junit.Assert.assertEquals;
|
import static org.junit.Assert.assertEquals;
|
||||||
import static org.junit.Assert.assertTrue;
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
|
import java.io.IOException;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
@ -59,7 +61,7 @@ public void cleanup() throws Exception {
|
||||||
fs.close();
|
fs.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testWithDirStringAndConf() throws Exception {
|
public void testWithDirStringAndConf() throws Exception {
|
||||||
String dirString = "d1";
|
String dirString = "d1";
|
||||||
PathData item = new PathData(dirString, conf);
|
PathData item = new PathData(dirString, conf);
|
||||||
|
@ -72,7 +74,7 @@ public void testWithDirStringAndConf() throws Exception {
|
||||||
checkPathData(dirString, item);
|
checkPathData(dirString, item);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testUnqualifiedUriContents() throws Exception {
|
public void testUnqualifiedUriContents() throws Exception {
|
||||||
String dirString = "d1";
|
String dirString = "d1";
|
||||||
PathData item = new PathData(dirString, conf);
|
PathData item = new PathData(dirString, conf);
|
||||||
|
@ -83,7 +85,7 @@ public void testUnqualifiedUriContents() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testQualifiedUriContents() throws Exception {
|
public void testQualifiedUriContents() throws Exception {
|
||||||
String dirString = fs.makeQualified(new Path("d1")).toString();
|
String dirString = fs.makeQualified(new Path("d1")).toString();
|
||||||
PathData item = new PathData(dirString, conf);
|
PathData item = new PathData(dirString, conf);
|
||||||
|
@ -94,7 +96,7 @@ public void testQualifiedUriContents() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCwdContents() throws Exception {
|
public void testCwdContents() throws Exception {
|
||||||
String dirString = Path.CUR_DIR;
|
String dirString = Path.CUR_DIR;
|
||||||
PathData item = new PathData(dirString, conf);
|
PathData item = new PathData(dirString, conf);
|
||||||
|
@ -105,7 +107,7 @@ public void testCwdContents() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testToFile() throws Exception {
|
public void testToFile() throws Exception {
|
||||||
PathData item = new PathData(".", conf);
|
PathData item = new PathData(".", conf);
|
||||||
assertEquals(new File(testDir.toString()), item.toFile());
|
assertEquals(new File(testDir.toString()), item.toFile());
|
||||||
|
@ -115,7 +117,56 @@ public void testToFile() throws Exception {
|
||||||
assertEquals(new File(testDir + "/d1/f1"), item.toFile());
|
assertEquals(new File(testDir + "/d1/f1"), item.toFile());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 5000)
|
||||||
|
public void testToFileRawWindowsPaths() throws Exception {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Can we handle raw Windows paths? The files need not exist for
|
||||||
|
// these tests to succeed.
|
||||||
|
String[] winPaths = {
|
||||||
|
"n:\\",
|
||||||
|
"N:\\",
|
||||||
|
"N:\\foo",
|
||||||
|
"N:\\foo\\bar",
|
||||||
|
"N:/",
|
||||||
|
"N:/foo",
|
||||||
|
"N:/foo/bar"
|
||||||
|
};
|
||||||
|
|
||||||
|
PathData item;
|
||||||
|
|
||||||
|
for (String path : winPaths) {
|
||||||
|
item = new PathData(path, conf);
|
||||||
|
assertEquals(new File(path), item.toFile());
|
||||||
|
}
|
||||||
|
|
||||||
|
item = new PathData("foo\\bar", conf);
|
||||||
|
assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 5000)
|
||||||
|
public void testInvalidWindowsPath() throws Exception {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that the following invalid paths are rejected.
|
||||||
|
String [] winPaths = {
|
||||||
|
"N:\\foo/bar"
|
||||||
|
};
|
||||||
|
|
||||||
|
for (String path : winPaths) {
|
||||||
|
try {
|
||||||
|
PathData item = new PathData(path, conf);
|
||||||
|
fail("Did not throw for invalid path " + path);
|
||||||
|
} catch (IOException ioe) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testAbsoluteGlob() throws Exception {
|
public void testAbsoluteGlob() throws Exception {
|
||||||
PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
|
PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -124,7 +175,7 @@ public void testAbsoluteGlob() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRelativeGlob() throws Exception {
|
public void testRelativeGlob() throws Exception {
|
||||||
PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
|
PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -133,7 +184,7 @@ public void testRelativeGlob() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testRelativeGlobBack() throws Exception {
|
public void testRelativeGlobBack() throws Exception {
|
||||||
fs.setWorkingDirectory(new Path("d1"));
|
fs.setWorkingDirectory(new Path("d1"));
|
||||||
PathData[] items = PathData.expandAsGlob("../d2/*", conf);
|
PathData[] items = PathData.expandAsGlob("../d2/*", conf);
|
||||||
|
@ -143,7 +194,7 @@ public void testRelativeGlobBack() throws Exception {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testWithStringAndConfForBuggyPath() throws Exception {
|
public void testWithStringAndConfForBuggyPath() throws Exception {
|
||||||
String dirString = "file:///tmp";
|
String dirString = "file:///tmp";
|
||||||
Path tmpDir = new Path(dirString);
|
Path tmpDir = new Path(dirString);
|
||||||
|
|
|
@ -26,9 +26,11 @@
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.StringWriter;
|
import java.io.StringWriter;
|
||||||
import java.lang.reflect.Method;
|
import java.lang.reflect.Method;
|
||||||
|
import java.net.URI;
|
||||||
|
|
||||||
import org.apache.commons.io.IOUtils;
|
import org.apache.commons.io.IOUtils;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -38,12 +40,13 @@
|
||||||
public class TestTextCommand {
|
public class TestTextCommand {
|
||||||
private static final String TEST_ROOT_DIR =
|
private static final String TEST_ROOT_DIR =
|
||||||
System.getProperty("test.build.data", "build/test/data/") + "/testText";
|
System.getProperty("test.build.data", "build/test/data/") + "/testText";
|
||||||
private static final String AVRO_FILENAME = TEST_ROOT_DIR + "/weather.avro";
|
private static final String AVRO_FILENAME =
|
||||||
|
new Path(TEST_ROOT_DIR, "weather.avro").toUri().getPath();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Tests whether binary Avro data files are displayed correctly.
|
* Tests whether binary Avro data files are displayed correctly.
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testDisplayForAvroFiles() throws Exception {
|
public void testDisplayForAvroFiles() throws Exception {
|
||||||
// Create a small Avro data file on the local file system.
|
// Create a small Avro data file on the local file system.
|
||||||
createAvroFile(generateWeatherAvroBinaryData());
|
createAvroFile(generateWeatherAvroBinaryData());
|
||||||
|
@ -51,7 +54,7 @@ public void testDisplayForAvroFiles() throws Exception {
|
||||||
// Prepare and call the Text command's protected getInputStream method
|
// Prepare and call the Text command's protected getInputStream method
|
||||||
// using reflection.
|
// using reflection.
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
File localPath = new File(AVRO_FILENAME);
|
URI localPath = new URI(AVRO_FILENAME);
|
||||||
PathData pathData = new PathData(localPath, conf);
|
PathData pathData = new PathData(localPath, conf);
|
||||||
Display.Text text = new Display.Text();
|
Display.Text text = new Display.Text();
|
||||||
text.setConf(conf);
|
text.setConf(conf);
|
||||||
|
|
|
@ -21,6 +21,8 @@
|
||||||
import java.io.FileDescriptor;
|
import java.io.FileDescriptor;
|
||||||
import java.io.FileInputStream;
|
import java.io.FileInputStream;
|
||||||
import java.io.FileOutputStream;
|
import java.io.FileOutputStream;
|
||||||
|
import java.io.FileReader;
|
||||||
|
import java.io.FileWriter;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.concurrent.atomic.AtomicReference;
|
import java.util.concurrent.atomic.AtomicReference;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
@ -60,11 +62,15 @@ public void setupTestDir() {
|
||||||
TEST_DIR.mkdirs();
|
TEST_DIR.mkdirs();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFstat() throws Exception {
|
public void testFstat() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
FileOutputStream fos = new FileOutputStream(
|
FileOutputStream fos = new FileOutputStream(
|
||||||
new File(TEST_DIR, "testfstat"));
|
new File(TEST_DIR, "testfstat"));
|
||||||
NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
|
NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
|
||||||
fos.close();
|
fos.close();
|
||||||
LOG.info("Stat: " + String.valueOf(stat));
|
LOG.info("Stat: " + String.valueOf(stat));
|
||||||
|
|
||||||
|
@ -72,7 +78,8 @@ public void testFstat() throws Exception {
|
||||||
assertNotNull(stat.getGroup());
|
assertNotNull(stat.getGroup());
|
||||||
assertTrue(!"".equals(stat.getGroup()));
|
assertTrue(!"".equals(stat.getGroup()));
|
||||||
assertEquals("Stat mode field should indicate a regular file",
|
assertEquals("Stat mode field should indicate a regular file",
|
||||||
NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
|
NativeIO.POSIX.Stat.S_IFREG,
|
||||||
|
stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -81,8 +88,12 @@ public void testFstat() throws Exception {
|
||||||
* NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe
|
* NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe
|
||||||
* implementation of getpwuid_r.
|
* implementation of getpwuid_r.
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testMultiThreadedFstat() throws Exception {
|
public void testMultiThreadedFstat() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
final FileOutputStream fos = new FileOutputStream(
|
final FileOutputStream fos = new FileOutputStream(
|
||||||
new File(TEST_DIR, "testfstat"));
|
new File(TEST_DIR, "testfstat"));
|
||||||
|
|
||||||
|
@ -96,12 +107,13 @@ public void run() {
|
||||||
long et = Time.now() + 5000;
|
long et = Time.now() + 5000;
|
||||||
while (Time.now() < et) {
|
while (Time.now() < et) {
|
||||||
try {
|
try {
|
||||||
NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
|
NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
|
||||||
assertEquals(System.getProperty("user.name"), stat.getOwner());
|
assertEquals(System.getProperty("user.name"), stat.getOwner());
|
||||||
assertNotNull(stat.getGroup());
|
assertNotNull(stat.getGroup());
|
||||||
assertTrue(!"".equals(stat.getGroup()));
|
assertTrue(!"".equals(stat.getGroup()));
|
||||||
assertEquals("Stat mode field should indicate a regular file",
|
assertEquals("Stat mode field should indicate a regular file",
|
||||||
NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
|
NativeIO.POSIX.Stat.S_IFREG,
|
||||||
|
stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
|
||||||
} catch (Throwable t) {
|
} catch (Throwable t) {
|
||||||
thrown.set(t);
|
thrown.set(t);
|
||||||
}
|
}
|
||||||
|
@ -122,26 +134,123 @@ public void run() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFstatClosedFd() throws Exception {
|
public void testFstatClosedFd() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
FileOutputStream fos = new FileOutputStream(
|
FileOutputStream fos = new FileOutputStream(
|
||||||
new File(TEST_DIR, "testfstat2"));
|
new File(TEST_DIR, "testfstat2"));
|
||||||
fos.close();
|
fos.close();
|
||||||
try {
|
try {
|
||||||
NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
|
NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
LOG.info("Got expected exception", nioe);
|
LOG.info("Got expected exception", nioe);
|
||||||
assertEquals(Errno.EBADF, nioe.getErrno());
|
assertEquals(Errno.EBADF, nioe.getErrno());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
|
public void testSetFilePointer() throws Exception {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG.info("Set a file pointer on Windows");
|
||||||
|
try {
|
||||||
|
File testfile = new File(TEST_DIR, "testSetFilePointer");
|
||||||
|
assertTrue("Create test subject",
|
||||||
|
testfile.exists() || testfile.createNewFile());
|
||||||
|
FileWriter writer = new FileWriter(testfile);
|
||||||
|
try {
|
||||||
|
for (int i = 0; i < 200; i++)
|
||||||
|
if (i < 100)
|
||||||
|
writer.write('a');
|
||||||
|
else
|
||||||
|
writer.write('b');
|
||||||
|
writer.flush();
|
||||||
|
} catch (Exception writerException) {
|
||||||
|
fail("Got unexpected exception: " + writerException.getMessage());
|
||||||
|
} finally {
|
||||||
|
writer.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
FileDescriptor fd = NativeIO.Windows.createFile(
|
||||||
|
testfile.getCanonicalPath(),
|
||||||
|
NativeIO.Windows.GENERIC_READ,
|
||||||
|
NativeIO.Windows.FILE_SHARE_READ |
|
||||||
|
NativeIO.Windows.FILE_SHARE_WRITE |
|
||||||
|
NativeIO.Windows.FILE_SHARE_DELETE,
|
||||||
|
NativeIO.Windows.OPEN_EXISTING);
|
||||||
|
NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
|
||||||
|
FileReader reader = new FileReader(fd);
|
||||||
|
try {
|
||||||
|
int c = reader.read();
|
||||||
|
assertTrue("Unexpected character: " + c, c == 'b');
|
||||||
|
} catch (Exception readerException) {
|
||||||
|
fail("Got unexpected exception: " + readerException.getMessage());
|
||||||
|
} finally {
|
||||||
|
reader.close();
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
fail("Got unexpected exception: " + e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testCreateFile() throws Exception {
|
||||||
|
if (!Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
|
||||||
|
try {
|
||||||
|
File testfile = new File(TEST_DIR, "testCreateFile");
|
||||||
|
assertTrue("Create test subject",
|
||||||
|
testfile.exists() || testfile.createNewFile());
|
||||||
|
|
||||||
|
FileDescriptor fd = NativeIO.Windows.createFile(
|
||||||
|
testfile.getCanonicalPath(),
|
||||||
|
NativeIO.Windows.GENERIC_READ,
|
||||||
|
NativeIO.Windows.FILE_SHARE_READ |
|
||||||
|
NativeIO.Windows.FILE_SHARE_WRITE |
|
||||||
|
NativeIO.Windows.FILE_SHARE_DELETE,
|
||||||
|
NativeIO.Windows.OPEN_EXISTING);
|
||||||
|
|
||||||
|
FileInputStream fin = new FileInputStream(fd);
|
||||||
|
try {
|
||||||
|
fin.read();
|
||||||
|
|
||||||
|
File newfile = new File(TEST_DIR, "testRenamedFile");
|
||||||
|
|
||||||
|
boolean renamed = testfile.renameTo(newfile);
|
||||||
|
assertTrue("Rename failed.", renamed);
|
||||||
|
|
||||||
|
fin.read();
|
||||||
|
} catch (Exception e) {
|
||||||
|
fail("Got unexpected exception: " + e.getMessage());
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
fin.close();
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
fail("Got unexpected exception: " + e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testOpenMissingWithoutCreate() throws Exception {
|
public void testOpenMissingWithoutCreate() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
LOG.info("Open a missing file without O_CREAT and it should fail");
|
LOG.info("Open a missing file without O_CREAT and it should fail");
|
||||||
try {
|
try {
|
||||||
FileDescriptor fd = NativeIO.open(
|
FileDescriptor fd = NativeIO.POSIX.open(
|
||||||
new File(TEST_DIR, "doesntexist").getAbsolutePath(),
|
new File(TEST_DIR, "doesntexist").getAbsolutePath(),
|
||||||
NativeIO.O_WRONLY, 0700);
|
NativeIO.POSIX.O_WRONLY, 0700);
|
||||||
fail("Able to open a new file without O_CREAT");
|
fail("Able to open a new file without O_CREAT");
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
LOG.info("Got expected exception", nioe);
|
LOG.info("Got expected exception", nioe);
|
||||||
|
@ -149,12 +258,16 @@ public void testOpenMissingWithoutCreate() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testOpenWithCreate() throws Exception {
|
public void testOpenWithCreate() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
LOG.info("Test creating a file with O_CREAT");
|
LOG.info("Test creating a file with O_CREAT");
|
||||||
FileDescriptor fd = NativeIO.open(
|
FileDescriptor fd = NativeIO.POSIX.open(
|
||||||
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
|
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
|
||||||
NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
|
NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
|
||||||
assertNotNull(true);
|
assertNotNull(true);
|
||||||
assertTrue(fd.valid());
|
assertTrue(fd.valid());
|
||||||
FileOutputStream fos = new FileOutputStream(fd);
|
FileOutputStream fos = new FileOutputStream(fd);
|
||||||
|
@ -165,9 +278,9 @@ public void testOpenWithCreate() throws Exception {
|
||||||
|
|
||||||
LOG.info("Test exclusive create");
|
LOG.info("Test exclusive create");
|
||||||
try {
|
try {
|
||||||
fd = NativeIO.open(
|
fd = NativeIO.POSIX.open(
|
||||||
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
|
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
|
||||||
NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
|
NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT | NativeIO.POSIX.O_EXCL, 0700);
|
||||||
fail("Was able to create existing file with O_EXCL");
|
fail("Was able to create existing file with O_EXCL");
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
LOG.info("Got expected exception for failed exclusive create", nioe);
|
LOG.info("Got expected exception for failed exclusive create", nioe);
|
||||||
|
@ -179,12 +292,16 @@ public void testOpenWithCreate() throws Exception {
|
||||||
* Test that opens and closes a file 10000 times - this would crash with
|
* Test that opens and closes a file 10000 times - this would crash with
|
||||||
* "Too many open files" if we leaked fds using this access pattern.
|
* "Too many open files" if we leaked fds using this access pattern.
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testFDDoesntLeak() throws IOException {
|
public void testFDDoesntLeak() throws IOException {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
for (int i = 0; i < 10000; i++) {
|
for (int i = 0; i < 10000; i++) {
|
||||||
FileDescriptor fd = NativeIO.open(
|
FileDescriptor fd = NativeIO.POSIX.open(
|
||||||
new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
|
new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
|
||||||
NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
|
NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
|
||||||
assertNotNull(true);
|
assertNotNull(true);
|
||||||
assertTrue(fd.valid());
|
assertTrue(fd.valid());
|
||||||
FileOutputStream fos = new FileOutputStream(fd);
|
FileOutputStream fos = new FileOutputStream(fd);
|
||||||
|
@ -196,10 +313,14 @@ public void testFDDoesntLeak() throws IOException {
|
||||||
/**
|
/**
|
||||||
* Test basic chmod operation
|
* Test basic chmod operation
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testChmod() throws Exception {
|
public void testChmod() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
NativeIO.chmod("/this/file/doesnt/exist", 777);
|
NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
|
||||||
fail("Chmod of non-existent file didn't fail");
|
fail("Chmod of non-existent file didn't fail");
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
assertEquals(Errno.ENOENT, nioe.getErrno());
|
assertEquals(Errno.ENOENT, nioe.getErrno());
|
||||||
|
@ -208,21 +329,26 @@ public void testChmod() throws Exception {
|
||||||
File toChmod = new File(TEST_DIR, "testChmod");
|
File toChmod = new File(TEST_DIR, "testChmod");
|
||||||
assertTrue("Create test subject",
|
assertTrue("Create test subject",
|
||||||
toChmod.exists() || toChmod.mkdir());
|
toChmod.exists() || toChmod.mkdir());
|
||||||
NativeIO.chmod(toChmod.getAbsolutePath(), 0777);
|
NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
|
||||||
assertPermissions(toChmod, 0777);
|
assertPermissions(toChmod, 0777);
|
||||||
NativeIO.chmod(toChmod.getAbsolutePath(), 0000);
|
NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
|
||||||
assertPermissions(toChmod, 0000);
|
assertPermissions(toChmod, 0000);
|
||||||
NativeIO.chmod(toChmod.getAbsolutePath(), 0644);
|
NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0644);
|
||||||
assertPermissions(toChmod, 0644);
|
assertPermissions(toChmod, 0644);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testPosixFadvise() throws Exception {
|
public void testPosixFadvise() throws Exception {
|
||||||
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
FileInputStream fis = new FileInputStream("/dev/zero");
|
FileInputStream fis = new FileInputStream("/dev/zero");
|
||||||
try {
|
try {
|
||||||
NativeIO.posix_fadvise(fis.getFD(), 0, 0,
|
NativeIO.POSIX.posix_fadvise(
|
||||||
NativeIO.POSIX_FADV_SEQUENTIAL);
|
fis.getFD(), 0, 0,
|
||||||
|
NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
|
||||||
} catch (UnsupportedOperationException uoe) {
|
} catch (UnsupportedOperationException uoe) {
|
||||||
// we should just skip the unit test on machines where we don't
|
// we should just skip the unit test on machines where we don't
|
||||||
// have fadvise support
|
// have fadvise support
|
||||||
|
@ -235,8 +361,9 @@ public void testPosixFadvise() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
NativeIO.posix_fadvise(fis.getFD(), 0, 1024,
|
NativeIO.POSIX.posix_fadvise(
|
||||||
NativeIO.POSIX_FADV_SEQUENTIAL);
|
fis.getFD(), 0, 1024,
|
||||||
|
NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
|
||||||
|
|
||||||
fail("Did not throw on bad file");
|
fail("Did not throw on bad file");
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
|
@ -244,8 +371,9 @@ public void testPosixFadvise() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
NativeIO.posix_fadvise(null, 0, 1024,
|
NativeIO.POSIX.posix_fadvise(
|
||||||
NativeIO.POSIX_FADV_SEQUENTIAL);
|
null, 0, 1024,
|
||||||
|
NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
|
||||||
|
|
||||||
fail("Did not throw on null file");
|
fail("Did not throw on null file");
|
||||||
} catch (NullPointerException npe) {
|
} catch (NullPointerException npe) {
|
||||||
|
@ -253,14 +381,15 @@ public void testPosixFadvise() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSyncFileRange() throws Exception {
|
public void testSyncFileRange() throws Exception {
|
||||||
FileOutputStream fos = new FileOutputStream(
|
FileOutputStream fos = new FileOutputStream(
|
||||||
new File(TEST_DIR, "testSyncFileRange"));
|
new File(TEST_DIR, "testSyncFileRange"));
|
||||||
try {
|
try {
|
||||||
fos.write("foo".getBytes());
|
fos.write("foo".getBytes());
|
||||||
NativeIO.sync_file_range(fos.getFD(), 0, 1024,
|
NativeIO.POSIX.sync_file_range(
|
||||||
NativeIO.SYNC_FILE_RANGE_WRITE);
|
fos.getFD(), 0, 1024,
|
||||||
|
NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
|
||||||
// no way to verify that this actually has synced,
|
// no way to verify that this actually has synced,
|
||||||
// but if it doesn't throw, we can assume it worked
|
// but if it doesn't throw, we can assume it worked
|
||||||
} catch (UnsupportedOperationException uoe) {
|
} catch (UnsupportedOperationException uoe) {
|
||||||
|
@ -271,8 +400,9 @@ public void testSyncFileRange() throws Exception {
|
||||||
fos.close();
|
fos.close();
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
NativeIO.sync_file_range(fos.getFD(), 0, 1024,
|
NativeIO.POSIX.sync_file_range(
|
||||||
NativeIO.SYNC_FILE_RANGE_WRITE);
|
fos.getFD(), 0, 1024,
|
||||||
|
NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
|
||||||
fail("Did not throw on bad file");
|
fail("Did not throw on bad file");
|
||||||
} catch (NativeIOException nioe) {
|
} catch (NativeIOException nioe) {
|
||||||
assertEquals(Errno.EBADF, nioe.getErrno());
|
assertEquals(Errno.EBADF, nioe.getErrno());
|
||||||
|
@ -286,17 +416,25 @@ private void assertPermissions(File f, int expected) throws IOException {
|
||||||
assertEquals(expected, perms.toShort());
|
assertEquals(expected, perms.toShort());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetUserName() throws IOException {
|
public void testGetUserName() throws IOException {
|
||||||
assertFalse(NativeIO.getUserName(0).isEmpty());
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testGetGroupName() throws IOException {
|
public void testGetGroupName() throws IOException {
|
||||||
assertFalse(NativeIO.getGroupName(0).isEmpty());
|
if (Path.WINDOWS) {
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
public void testRenameTo() throws Exception {
|
public void testRenameTo() throws Exception {
|
||||||
final File TEST_DIR = new File(new File(
|
final File TEST_DIR = new File(new File(
|
||||||
System.getProperty("test.build.data","build/test/data")), "renameTest");
|
System.getProperty("test.build.data","build/test/data")), "renameTest");
|
||||||
|
|
|
@ -43,6 +43,7 @@
|
||||||
import static org.apache.hadoop.test.MetricsAsserts.*;
|
import static org.apache.hadoop.test.MetricsAsserts.*;
|
||||||
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
|
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
|
||||||
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
|
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
public class TestUserGroupInformation {
|
public class TestUserGroupInformation {
|
||||||
final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
|
final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
|
||||||
|
@ -92,17 +93,17 @@ public void resetUgi() {
|
||||||
UserGroupInformation.setLoginUser(null);
|
UserGroupInformation.setLoginUser(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSimpleLogin() throws IOException {
|
public void testSimpleLogin() throws IOException {
|
||||||
tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
|
tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testTokenLogin() throws IOException {
|
public void testTokenLogin() throws IOException {
|
||||||
tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
|
tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testProxyLogin() throws IOException {
|
public void testProxyLogin() throws IOException {
|
||||||
tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false);
|
tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false);
|
||||||
}
|
}
|
||||||
|
@ -131,7 +132,7 @@ private void tryLoginAuthenticationMethod(AuthenticationMethod method,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetRealAuthenticationMethod() {
|
public void testGetRealAuthenticationMethod() {
|
||||||
UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
|
UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
|
||||||
ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
|
ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
|
||||||
|
@ -142,7 +143,7 @@ public void testGetRealAuthenticationMethod() {
|
||||||
assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
|
assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
|
||||||
}
|
}
|
||||||
/** Test login method */
|
/** Test login method */
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testLogin() throws Exception {
|
public void testLogin() throws Exception {
|
||||||
// login from unix
|
// login from unix
|
||||||
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
||||||
|
@ -169,7 +170,7 @@ public UserGroupInformation run() throws IOException {
|
||||||
* given user name - get all the groups.
|
* given user name - get all the groups.
|
||||||
* Needs to happen before creating the test users
|
* Needs to happen before creating the test users
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetServerSideGroups() throws IOException,
|
public void testGetServerSideGroups() throws IOException,
|
||||||
InterruptedException {
|
InterruptedException {
|
||||||
// get the user name
|
// get the user name
|
||||||
|
@ -177,19 +178,38 @@ public void testGetServerSideGroups() throws IOException,
|
||||||
BufferedReader br = new BufferedReader
|
BufferedReader br = new BufferedReader
|
||||||
(new InputStreamReader(pp.getInputStream()));
|
(new InputStreamReader(pp.getInputStream()));
|
||||||
String userName = br.readLine().trim();
|
String userName = br.readLine().trim();
|
||||||
|
// If on windows domain, token format is DOMAIN\\user and we want to
|
||||||
|
// extract only the user name
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
int sp = userName.lastIndexOf('\\');
|
||||||
|
if (sp != -1) {
|
||||||
|
userName = userName.substring(sp + 1);
|
||||||
|
}
|
||||||
|
// user names are case insensitive on Windows. Make consistent
|
||||||
|
userName = userName.toLowerCase();
|
||||||
|
}
|
||||||
// get the groups
|
// get the groups
|
||||||
pp = Runtime.getRuntime().exec("id -Gn " + userName);
|
pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
|
||||||
|
Shell.WINUTILS + " groups -F" : "id -Gn");
|
||||||
br = new BufferedReader(new InputStreamReader(pp.getInputStream()));
|
br = new BufferedReader(new InputStreamReader(pp.getInputStream()));
|
||||||
String line = br.readLine();
|
String line = br.readLine();
|
||||||
|
|
||||||
System.out.println(userName + ":" + line);
|
System.out.println(userName + ":" + line);
|
||||||
|
|
||||||
Set<String> groups = new LinkedHashSet<String> ();
|
Set<String> groups = new LinkedHashSet<String> ();
|
||||||
for(String s: line.split("[\\s]")) {
|
String[] tokens = line.split(Shell.TOKEN_SEPARATOR_REGEX);
|
||||||
|
for(String s: tokens) {
|
||||||
groups.add(s);
|
groups.add(s);
|
||||||
}
|
}
|
||||||
|
|
||||||
final UserGroupInformation login = UserGroupInformation.getCurrentUser();
|
final UserGroupInformation login = UserGroupInformation.getCurrentUser();
|
||||||
assertEquals(userName, login.getShortUserName());
|
String loginUserName = login.getShortUserName();
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
// user names are case insensitive on Windows. Make consistent
|
||||||
|
loginUserName = loginUserName.toLowerCase();
|
||||||
|
}
|
||||||
|
assertEquals(userName, loginUserName);
|
||||||
|
|
||||||
String[] gi = login.getGroupNames();
|
String[] gi = login.getGroupNames();
|
||||||
assertEquals(groups.size(), gi.length);
|
assertEquals(groups.size(), gi.length);
|
||||||
for(int i=0; i < gi.length; i++) {
|
for(int i=0; i < gi.length; i++) {
|
||||||
|
@ -210,7 +230,7 @@ public Object run() throws IOException {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** test constructor */
|
/** test constructor */
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testConstructor() throws Exception {
|
public void testConstructor() throws Exception {
|
||||||
// security off, so default should just return simple name
|
// security off, so default should just return simple name
|
||||||
testConstructorSuccess("user1", "user1");
|
testConstructorSuccess("user1", "user1");
|
||||||
|
@ -297,7 +317,7 @@ private void testConstructorFailures(String userName) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSetConfigWithRules() {
|
public void testSetConfigWithRules() {
|
||||||
String[] rules = { "RULE:[1:TEST1]", "RULE:[1:TEST2]", "RULE:[1:TEST3]" };
|
String[] rules = { "RULE:[1:TEST1]", "RULE:[1:TEST2]", "RULE:[1:TEST3]" };
|
||||||
|
|
||||||
|
@ -364,7 +384,7 @@ public void testEquals() throws Exception {
|
||||||
assertEquals(uugi.hashCode(), ugi3.hashCode());
|
assertEquals(uugi.hashCode(), ugi3.hashCode());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testEqualsWithRealUser() throws Exception {
|
public void testEqualsWithRealUser() throws Exception {
|
||||||
UserGroupInformation realUgi1 = UserGroupInformation.createUserForTesting(
|
UserGroupInformation realUgi1 = UserGroupInformation.createUserForTesting(
|
||||||
"RealUser", GROUP_NAMES);
|
"RealUser", GROUP_NAMES);
|
||||||
|
@ -377,7 +397,7 @@ public void testEqualsWithRealUser() throws Exception {
|
||||||
assertFalse(remoteUgi.equals(proxyUgi1));
|
assertFalse(remoteUgi.equals(proxyUgi1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGettingGroups() throws Exception {
|
public void testGettingGroups() throws Exception {
|
||||||
UserGroupInformation uugi =
|
UserGroupInformation uugi =
|
||||||
UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
|
UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
|
||||||
|
@ -387,7 +407,7 @@ public void testGettingGroups() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // from Mockito mocks
|
@SuppressWarnings("unchecked") // from Mockito mocks
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testAddToken() throws Exception {
|
public <T extends TokenIdentifier> void testAddToken() throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
UserGroupInformation.createRemoteUser("someone");
|
UserGroupInformation.createRemoteUser("someone");
|
||||||
|
@ -425,7 +445,7 @@ public <T extends TokenIdentifier> void testAddToken() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // from Mockito mocks
|
@SuppressWarnings("unchecked") // from Mockito mocks
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testGetCreds() throws Exception {
|
public <T extends TokenIdentifier> void testGetCreds() throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
UserGroupInformation.createRemoteUser("someone");
|
UserGroupInformation.createRemoteUser("someone");
|
||||||
|
@ -451,7 +471,7 @@ public <T extends TokenIdentifier> void testGetCreds() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // from Mockito mocks
|
@SuppressWarnings("unchecked") // from Mockito mocks
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testAddCreds() throws Exception {
|
public <T extends TokenIdentifier> void testAddCreds() throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
UserGroupInformation.createRemoteUser("someone");
|
UserGroupInformation.createRemoteUser("someone");
|
||||||
|
@ -476,7 +496,7 @@ public <T extends TokenIdentifier> void testAddCreds() throws Exception {
|
||||||
assertSame(secret, ugi.getCredentials().getSecretKey(secretKey));
|
assertSame(secret, ugi.getCredentials().getSecretKey(secretKey));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testGetCredsNotSame()
|
public <T extends TokenIdentifier> void testGetCredsNotSame()
|
||||||
throws Exception {
|
throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
|
@ -504,7 +524,7 @@ private void checkTokens(UserGroupInformation ugi, Token<?> ... tokens) {
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // from Mockito mocks
|
@SuppressWarnings("unchecked") // from Mockito mocks
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testAddNamedToken() throws Exception {
|
public <T extends TokenIdentifier> void testAddNamedToken() throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
UserGroupInformation.createRemoteUser("someone");
|
UserGroupInformation.createRemoteUser("someone");
|
||||||
|
@ -525,7 +545,7 @@ public <T extends TokenIdentifier> void testAddNamedToken() throws Exception {
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked") // from Mockito mocks
|
@SuppressWarnings("unchecked") // from Mockito mocks
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public <T extends TokenIdentifier> void testUGITokens() throws Exception {
|
public <T extends TokenIdentifier> void testUGITokens() throws Exception {
|
||||||
UserGroupInformation ugi =
|
UserGroupInformation ugi =
|
||||||
UserGroupInformation.createUserForTesting("TheDoctor",
|
UserGroupInformation.createUserForTesting("TheDoctor",
|
||||||
|
@ -571,7 +591,7 @@ public Collection<Token<?>> run() throws IOException {
|
||||||
assertTrue(otherSet.contains(t2));
|
assertTrue(otherSet.contains(t2));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testTokenIdentifiers() throws Exception {
|
public void testTokenIdentifiers() throws Exception {
|
||||||
UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
|
UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
|
||||||
"TheDoctor", new String[] { "TheTARDIS" });
|
"TheDoctor", new String[] { "TheTARDIS" });
|
||||||
|
@ -599,7 +619,7 @@ public Collection<TokenIdentifier> run() throws IOException {
|
||||||
assertEquals(2, otherSet.size());
|
assertEquals(2, otherSet.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testTestAuthMethod() throws Exception {
|
public void testTestAuthMethod() throws Exception {
|
||||||
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
||||||
// verify the reverse mappings works
|
// verify the reverse mappings works
|
||||||
|
@ -611,7 +631,7 @@ public void testTestAuthMethod() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testUGIAuthMethod() throws Exception {
|
public void testUGIAuthMethod() throws Exception {
|
||||||
final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
||||||
final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
|
final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
|
||||||
|
@ -627,7 +647,7 @@ public Object run() throws IOException {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testUGIAuthMethodInRealUser() throws Exception {
|
public void testUGIAuthMethodInRealUser() throws Exception {
|
||||||
final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
||||||
UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
|
UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
|
||||||
|
@ -662,7 +682,7 @@ public Object run() throws IOException {
|
||||||
Assert.assertEquals(proxyUgi3, proxyUgi4);
|
Assert.assertEquals(proxyUgi3, proxyUgi4);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testLoginObjectInSubject() throws Exception {
|
public void testLoginObjectInSubject() throws Exception {
|
||||||
UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
|
UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
|
||||||
UserGroupInformation anotherUgi = new UserGroupInformation(loginUgi
|
UserGroupInformation anotherUgi = new UserGroupInformation(loginUgi
|
||||||
|
@ -675,7 +695,7 @@ public void testLoginObjectInSubject() throws Exception {
|
||||||
Assert.assertTrue(login1 == login2);
|
Assert.assertTrue(login1 == login2);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testLoginModuleCommit() throws Exception {
|
public void testLoginModuleCommit() throws Exception {
|
||||||
UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
|
UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
|
||||||
User user1 = loginUgi.getSubject().getPrincipals(User.class).iterator()
|
User user1 = loginUgi.getSubject().getPrincipals(User.class).iterator()
|
||||||
|
@ -709,7 +729,7 @@ public static void verifyLoginMetrics(long success, int failure)
|
||||||
* with it, but that Subject was not created by Hadoop (ie it has no
|
* with it, but that Subject was not created by Hadoop (ie it has no
|
||||||
* associated User principal)
|
* associated User principal)
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testUGIUnderNonHadoopContext() throws Exception {
|
public void testUGIUnderNonHadoopContext() throws Exception {
|
||||||
Subject nonHadoopSubject = new Subject();
|
Subject nonHadoopSubject = new Subject();
|
||||||
Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction<Void>() {
|
Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction<Void>() {
|
||||||
|
|
|
@ -30,24 +30,29 @@
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.fs.FileStatus;
|
import org.apache.hadoop.fs.FileStatus;
|
||||||
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
|
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
|
||||||
|
import org.apache.hadoop.util.Shell;
|
||||||
|
|
||||||
public class TestDiskChecker {
|
public class TestDiskChecker {
|
||||||
final FsPermission defaultPerm = new FsPermission("755");
|
final FsPermission defaultPerm = new FsPermission("755");
|
||||||
final FsPermission invalidPerm = new FsPermission("000");
|
final FsPermission invalidPerm = new FsPermission("000");
|
||||||
|
|
||||||
@Test public void testMkdirs_dirExists() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testMkdirs_dirExists() throws Throwable {
|
||||||
_mkdirs(true, defaultPerm, defaultPerm);
|
_mkdirs(true, defaultPerm, defaultPerm);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testMkdirs_noDir() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testMkdirs_noDir() throws Throwable {
|
||||||
_mkdirs(false, defaultPerm, defaultPerm);
|
_mkdirs(false, defaultPerm, defaultPerm);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testMkdirs_dirExists_badUmask() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testMkdirs_dirExists_badUmask() throws Throwable {
|
||||||
_mkdirs(true, defaultPerm, invalidPerm);
|
_mkdirs(true, defaultPerm, invalidPerm);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testMkdirs_noDir_badUmask() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testMkdirs_noDir_badUmask() throws Throwable {
|
||||||
_mkdirs(false, defaultPerm, invalidPerm);
|
_mkdirs(false, defaultPerm, invalidPerm);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -78,23 +83,28 @@ private void _mkdirs(boolean exists, FsPermission before, FsPermission after)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testCheckDir_normal() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testCheckDir_normal() throws Throwable {
|
||||||
_checkDirs(true, new FsPermission("755"), true);
|
_checkDirs(true, new FsPermission("755"), true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testCheckDir_notDir() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testCheckDir_notDir() throws Throwable {
|
||||||
_checkDirs(false, new FsPermission("000"), false);
|
_checkDirs(false, new FsPermission("000"), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testCheckDir_notReadable() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testCheckDir_notReadable() throws Throwable {
|
||||||
_checkDirs(true, new FsPermission("000"), false);
|
_checkDirs(true, new FsPermission("000"), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testCheckDir_notWritable() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testCheckDir_notWritable() throws Throwable {
|
||||||
_checkDirs(true, new FsPermission("444"), false);
|
_checkDirs(true, new FsPermission("444"), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testCheckDir_notListable() throws Throwable {
|
@Test (timeout = 30000)
|
||||||
|
public void testCheckDir_notListable() throws Throwable {
|
||||||
_checkDirs(true, new FsPermission("666"), false); // not listable
|
_checkDirs(true, new FsPermission("666"), false); // not listable
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -130,27 +140,27 @@ private void _checkDirs(boolean isDir, FsPermission perm, boolean success)
|
||||||
* permission for result of mapper.
|
* permission for result of mapper.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCheckDir_normal_local() throws Throwable {
|
public void testCheckDir_normal_local() throws Throwable {
|
||||||
_checkDirs(true, "755", true);
|
_checkDirs(true, "755", true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCheckDir_notDir_local() throws Throwable {
|
public void testCheckDir_notDir_local() throws Throwable {
|
||||||
_checkDirs(false, "000", false);
|
_checkDirs(false, "000", false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCheckDir_notReadable_local() throws Throwable {
|
public void testCheckDir_notReadable_local() throws Throwable {
|
||||||
_checkDirs(true, "000", false);
|
_checkDirs(true, "000", false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCheckDir_notWritable_local() throws Throwable {
|
public void testCheckDir_notWritable_local() throws Throwable {
|
||||||
_checkDirs(true, "444", false);
|
_checkDirs(true, "444", false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCheckDir_notListable_local() throws Throwable {
|
public void testCheckDir_notListable_local() throws Throwable {
|
||||||
_checkDirs(true, "666", false);
|
_checkDirs(true, "666", false);
|
||||||
}
|
}
|
||||||
|
@ -160,8 +170,8 @@ private void _checkDirs(boolean isDir, String perm, boolean success)
|
||||||
File localDir = File.createTempFile("test", "tmp");
|
File localDir = File.createTempFile("test", "tmp");
|
||||||
localDir.delete();
|
localDir.delete();
|
||||||
localDir.mkdir();
|
localDir.mkdir();
|
||||||
Runtime.getRuntime().exec(
|
Shell.execCommand(Shell.getSetPermissionCommand(perm, false,
|
||||||
"chmod " + perm + " " + localDir.getAbsolutePath()).waitFor();
|
localDir.getAbsolutePath()));
|
||||||
try {
|
try {
|
||||||
DiskChecker.checkDir(localDir);
|
DiskChecker.checkDir(localDir);
|
||||||
assertTrue("checkDir success", success);
|
assertTrue("checkDir success", success);
|
||||||
|
|
|
@ -49,7 +49,9 @@ public void testFilesOption() throws Exception {
|
||||||
String[] args = new String[2];
|
String[] args = new String[2];
|
||||||
// pass a files option
|
// pass a files option
|
||||||
args[0] = "-files";
|
args[0] = "-files";
|
||||||
args[1] = tmpFile.toString();
|
// Convert a file to a URI as File.toString() is not a valid URI on
|
||||||
|
// all platforms and GenericOptionsParser accepts only valid URIs
|
||||||
|
args[1] = tmpFile.toURI().toString();
|
||||||
new GenericOptionsParser(conf, args);
|
new GenericOptionsParser(conf, args);
|
||||||
String files = conf.get("tmpfiles");
|
String files = conf.get("tmpfiles");
|
||||||
assertNotNull("files is null", files);
|
assertNotNull("files is null", files);
|
||||||
|
@ -58,7 +60,7 @@ public void testFilesOption() throws Exception {
|
||||||
|
|
||||||
// pass file as uri
|
// pass file as uri
|
||||||
Configuration conf1 = new Configuration();
|
Configuration conf1 = new Configuration();
|
||||||
URI tmpURI = new URI(tmpFile.toString() + "#link");
|
URI tmpURI = new URI(tmpFile.toURI().toString() + "#link");
|
||||||
args[0] = "-files";
|
args[0] = "-files";
|
||||||
args[1] = tmpURI.toString();
|
args[1] = tmpURI.toString();
|
||||||
new GenericOptionsParser(conf1, args);
|
new GenericOptionsParser(conf1, args);
|
||||||
|
@ -153,7 +155,7 @@ public void testTokenCacheOption() throws IOException {
|
||||||
String[] args = new String[2];
|
String[] args = new String[2];
|
||||||
// pass a files option
|
// pass a files option
|
||||||
args[0] = "-tokenCacheFile";
|
args[0] = "-tokenCacheFile";
|
||||||
args[1] = tmpFile.toString();
|
args[1] = tmpFile.toURI().toString();
|
||||||
|
|
||||||
// test non existing file
|
// test non existing file
|
||||||
Throwable th = null;
|
Throwable th = null;
|
||||||
|
|
|
@ -81,6 +81,10 @@ public void testShellCommandExecutorToString() throws Throwable {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testShellCommandTimeout() throws Throwable {
|
public void testShellCommandTimeout() throws Throwable {
|
||||||
|
if(Shell.WINDOWS) {
|
||||||
|
// setExecutable does not work on Windows
|
||||||
|
return;
|
||||||
|
}
|
||||||
String rootDir = new File(System.getProperty(
|
String rootDir = new File(System.getProperty(
|
||||||
"test.build.data", "/tmp")).getAbsolutePath();
|
"test.build.data", "/tmp")).getAbsolutePath();
|
||||||
File shellFile = new File(rootDir, "timeout.sh");
|
File shellFile = new File(rootDir, "timeout.sh");
|
||||||
|
|
|
@ -25,7 +25,10 @@
|
||||||
import static org.junit.Assert.fail;
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import org.apache.hadoop.test.UnitTestcaseTimeLimit;
|
import org.apache.hadoop.test.UnitTestcaseTimeLimit;
|
||||||
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
|
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
|
||||||
|
@ -43,7 +46,7 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
|
||||||
final private static String ESCAPED_STR_WITH_BOTH2 =
|
final private static String ESCAPED_STR_WITH_BOTH2 =
|
||||||
"\\,A\\\\\\,\\,B\\\\\\\\\\,";
|
"\\,A\\\\\\,\\,B\\\\\\\\\\,";
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testEscapeString() throws Exception {
|
public void testEscapeString() throws Exception {
|
||||||
assertEquals(NULL_STR, StringUtils.escapeString(NULL_STR));
|
assertEquals(NULL_STR, StringUtils.escapeString(NULL_STR));
|
||||||
assertEquals(EMPTY_STR, StringUtils.escapeString(EMPTY_STR));
|
assertEquals(EMPTY_STR, StringUtils.escapeString(EMPTY_STR));
|
||||||
|
@ -57,7 +60,7 @@ public void testEscapeString() throws Exception {
|
||||||
StringUtils.escapeString(STR_WITH_BOTH2));
|
StringUtils.escapeString(STR_WITH_BOTH2));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSplit() throws Exception {
|
public void testSplit() throws Exception {
|
||||||
assertEquals(NULL_STR, StringUtils.split(NULL_STR));
|
assertEquals(NULL_STR, StringUtils.split(NULL_STR));
|
||||||
String[] splits = StringUtils.split(EMPTY_STR);
|
String[] splits = StringUtils.split(EMPTY_STR);
|
||||||
|
@ -87,7 +90,7 @@ public void testSplit() throws Exception {
|
||||||
assertEquals(ESCAPED_STR_WITH_BOTH2, splits[0]);
|
assertEquals(ESCAPED_STR_WITH_BOTH2, splits[0]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSimpleSplit() throws Exception {
|
public void testSimpleSplit() throws Exception {
|
||||||
final String[] TO_TEST = {
|
final String[] TO_TEST = {
|
||||||
"a/b/c",
|
"a/b/c",
|
||||||
|
@ -103,7 +106,7 @@ public void testSimpleSplit() throws Exception {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testUnescapeString() throws Exception {
|
public void testUnescapeString() throws Exception {
|
||||||
assertEquals(NULL_STR, StringUtils.unEscapeString(NULL_STR));
|
assertEquals(NULL_STR, StringUtils.unEscapeString(NULL_STR));
|
||||||
assertEquals(EMPTY_STR, StringUtils.unEscapeString(EMPTY_STR));
|
assertEquals(EMPTY_STR, StringUtils.unEscapeString(EMPTY_STR));
|
||||||
|
@ -135,7 +138,7 @@ public void testUnescapeString() throws Exception {
|
||||||
StringUtils.unEscapeString(ESCAPED_STR_WITH_BOTH2));
|
StringUtils.unEscapeString(ESCAPED_STR_WITH_BOTH2));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testTraditionalBinaryPrefix() throws Exception {
|
public void testTraditionalBinaryPrefix() throws Exception {
|
||||||
//test string2long(..)
|
//test string2long(..)
|
||||||
String[] symbol = {"k", "m", "g", "t", "p", "e"};
|
String[] symbol = {"k", "m", "g", "t", "p", "e"};
|
||||||
|
@ -261,7 +264,7 @@ public void testTraditionalBinaryPrefix() throws Exception {
|
||||||
assertEquals("0.5430%", StringUtils.formatPercent(0.00543, 4));
|
assertEquals("0.5430%", StringUtils.formatPercent(0.00543, 4));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testJoin() {
|
public void testJoin() {
|
||||||
List<String> s = new ArrayList<String>();
|
List<String> s = new ArrayList<String>();
|
||||||
s.add("a");
|
s.add("a");
|
||||||
|
@ -273,7 +276,7 @@ public void testJoin() {
|
||||||
assertEquals("a:b:c", StringUtils.join(":", s.subList(0, 3)));
|
assertEquals("a:b:c", StringUtils.join(":", s.subList(0, 3)));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testGetTrimmedStrings() throws Exception {
|
public void testGetTrimmedStrings() throws Exception {
|
||||||
String compactDirList = "/spindle1/hdfs,/spindle2/hdfs,/spindle3/hdfs";
|
String compactDirList = "/spindle1/hdfs,/spindle2/hdfs,/spindle3/hdfs";
|
||||||
String spacedDirList = "/spindle1/hdfs, /spindle2/hdfs, /spindle3/hdfs";
|
String spacedDirList = "/spindle1/hdfs, /spindle2/hdfs, /spindle3/hdfs";
|
||||||
|
@ -294,7 +297,7 @@ public void testGetTrimmedStrings() throws Exception {
|
||||||
assertArrayEquals(emptyArray, StringUtils.getTrimmedStrings(emptyList2));
|
assertArrayEquals(emptyArray, StringUtils.getTrimmedStrings(emptyList2));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testCamelize() {
|
public void testCamelize() {
|
||||||
// common use cases
|
// common use cases
|
||||||
assertEquals("Map", StringUtils.camelize("MAP"));
|
assertEquals("Map", StringUtils.camelize("MAP"));
|
||||||
|
@ -330,7 +333,7 @@ public void testCamelize() {
|
||||||
assertEquals("Zz", StringUtils.camelize("zZ"));
|
assertEquals("Zz", StringUtils.camelize("zZ"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testStringToURI() {
|
public void testStringToURI() {
|
||||||
String[] str = new String[] { "file://" };
|
String[] str = new String[] { "file://" };
|
||||||
try {
|
try {
|
||||||
|
@ -341,7 +344,7 @@ public void testStringToURI() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test (timeout = 30000)
|
||||||
public void testSimpleHostName() {
|
public void testSimpleHostName() {
|
||||||
assertEquals("Should return hostname when FQDN is specified",
|
assertEquals("Should return hostname when FQDN is specified",
|
||||||
"hadoop01",
|
"hadoop01",
|
||||||
|
@ -354,6 +357,49 @@ public void testSimpleHostName() {
|
||||||
StringUtils.simpleHostname("10.10.5.68"));
|
StringUtils.simpleHostname("10.10.5.68"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 5000)
|
||||||
|
public void testReplaceTokensShellEnvVars() {
|
||||||
|
Pattern pattern = StringUtils.SHELL_ENV_VAR_PATTERN;
|
||||||
|
Map<String, String> replacements = new HashMap<String, String>();
|
||||||
|
replacements.put("FOO", "one");
|
||||||
|
replacements.put("BAZ", "two");
|
||||||
|
replacements.put("NUMBERS123", "one-two-three");
|
||||||
|
replacements.put("UNDER_SCORES", "___");
|
||||||
|
|
||||||
|
assertEquals("one", StringUtils.replaceTokens("$FOO", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("two", StringUtils.replaceTokens("$BAZ", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("", StringUtils.replaceTokens("$BAR", pattern, replacements));
|
||||||
|
assertEquals("", StringUtils.replaceTokens("", pattern, replacements));
|
||||||
|
assertEquals("one-two-three", StringUtils.replaceTokens("$NUMBERS123",
|
||||||
|
pattern, replacements));
|
||||||
|
assertEquals("___", StringUtils.replaceTokens("$UNDER_SCORES", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("//one//two//", StringUtils.replaceTokens("//$FOO/$BAR/$BAZ//",
|
||||||
|
pattern, replacements));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 5000)
|
||||||
|
public void testReplaceTokensWinEnvVars() {
|
||||||
|
Pattern pattern = StringUtils.WIN_ENV_VAR_PATTERN;
|
||||||
|
Map<String, String> replacements = new HashMap<String, String>();
|
||||||
|
replacements.put("foo", "zoo");
|
||||||
|
replacements.put("baz", "zaz");
|
||||||
|
|
||||||
|
assertEquals("zoo", StringUtils.replaceTokens("%foo%", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("zaz", StringUtils.replaceTokens("%baz%", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("", StringUtils.replaceTokens("%bar%", pattern,
|
||||||
|
replacements));
|
||||||
|
assertEquals("", StringUtils.replaceTokens("", pattern, replacements));
|
||||||
|
assertEquals("zoo__zaz", StringUtils.replaceTokens("%foo%_%bar%_%baz%",
|
||||||
|
pattern, replacements));
|
||||||
|
assertEquals("begin zoo__zaz end", StringUtils.replaceTokens(
|
||||||
|
"begin %foo%_%bar%_%baz% end", pattern, replacements));
|
||||||
|
}
|
||||||
|
|
||||||
// Benchmark for StringUtils split
|
// Benchmark for StringUtils split
|
||||||
public static void main(String []args) {
|
public static void main(String []args) {
|
||||||
final String TO_SPLIT = "foo,bar,baz,blah,blah";
|
final String TO_SPLIT = "foo,bar,baz,blah,blah";
|
||||||
|
|
|
@ -0,0 +1,352 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.util;
|
||||||
|
|
||||||
|
import static org.junit.Assert.*;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.FileInputStream;
|
||||||
|
import java.io.FileOutputStream;
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
import org.apache.commons.io.FileUtils;
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.apache.hadoop.fs.FileUtil;
|
||||||
|
import org.junit.After;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test cases for helper Windows winutils.exe utility.
|
||||||
|
*/
|
||||||
|
public class TestWinUtils {
|
||||||
|
|
||||||
|
private static final Log LOG = LogFactory.getLog(TestWinUtils.class);
|
||||||
|
private static File TEST_DIR = new File(System.getProperty("test.build.data",
|
||||||
|
"/tmp"), TestWinUtils.class.getSimpleName());
|
||||||
|
|
||||||
|
@Before
|
||||||
|
public void setUp() {
|
||||||
|
TEST_DIR.mkdirs();
|
||||||
|
}
|
||||||
|
|
||||||
|
@After
|
||||||
|
public void tearDown() throws IOException {
|
||||||
|
FileUtil.fullyDelete(TEST_DIR);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper routine that writes the given content to the file.
|
||||||
|
private void writeFile(File file, String content) throws IOException {
|
||||||
|
byte[] data = content.getBytes();
|
||||||
|
FileOutputStream os = new FileOutputStream(file);
|
||||||
|
os.write(data);
|
||||||
|
os.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper routine that reads the first 100 bytes from the file.
|
||||||
|
private String readFile(File file) throws IOException {
|
||||||
|
FileInputStream fos = new FileInputStream(file);
|
||||||
|
byte[] b = new byte[100];
|
||||||
|
fos.read(b);
|
||||||
|
return b.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testLs() throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Not supported on non-Windows platforms
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
final String content = "6bytes";
|
||||||
|
final int contentSize = content.length();
|
||||||
|
File testFile = new File(TEST_DIR, "file1");
|
||||||
|
writeFile(testFile, content);
|
||||||
|
|
||||||
|
// Verify permissions and file name return tokens
|
||||||
|
String output = Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "ls", testFile.getCanonicalPath());
|
||||||
|
String[] outputArgs = output.split("[ \r\n]");
|
||||||
|
assertTrue(outputArgs[0].equals("-rwx------"));
|
||||||
|
assertTrue(outputArgs[outputArgs.length - 1]
|
||||||
|
.equals(testFile.getCanonicalPath()));
|
||||||
|
|
||||||
|
// Verify most tokens when using a formatted output (other tokens
|
||||||
|
// will be verified with chmod/chown)
|
||||||
|
output = Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "ls", "-F", testFile.getCanonicalPath());
|
||||||
|
outputArgs = output.split("[|\r\n]");
|
||||||
|
assertEquals(9, outputArgs.length);
|
||||||
|
assertTrue(outputArgs[0].equals("-rwx------"));
|
||||||
|
assertEquals(contentSize, Long.parseLong(outputArgs[4]));
|
||||||
|
assertTrue(outputArgs[8].equals(testFile.getCanonicalPath()));
|
||||||
|
|
||||||
|
testFile.delete();
|
||||||
|
assertFalse(testFile.exists());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testGroups() throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Not supported on non-Windows platforms
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
String currentUser = System.getProperty("user.name");
|
||||||
|
|
||||||
|
// Verify that groups command returns information about the current user
|
||||||
|
// groups when invoked with no args
|
||||||
|
String outputNoArgs = Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "groups").trim();
|
||||||
|
String output = Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "groups", currentUser).trim();
|
||||||
|
assertEquals(output, outputNoArgs);
|
||||||
|
|
||||||
|
// Verify that groups command with the -F flag returns the same information
|
||||||
|
String outputFormat = Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "groups", "-F", currentUser).trim();
|
||||||
|
outputFormat = outputFormat.replace("|", " ");
|
||||||
|
assertEquals(output, outputFormat);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void chmod(String mask, File file) throws IOException {
|
||||||
|
Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "chmod", mask, file.getCanonicalPath());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void chmodR(String mask, File file) throws IOException {
|
||||||
|
Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "chmod", "-R", mask, file.getCanonicalPath());
|
||||||
|
}
|
||||||
|
|
||||||
|
private String ls(File file) throws IOException {
|
||||||
|
return Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "ls", file.getCanonicalPath());
|
||||||
|
}
|
||||||
|
|
||||||
|
private String lsF(File file) throws IOException {
|
||||||
|
return Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "ls", "-F", file.getCanonicalPath());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void assertPermissions(File file, String expected)
|
||||||
|
throws IOException {
|
||||||
|
String output = ls(file).split("[ \r\n]")[0];
|
||||||
|
assertEquals(expected, output);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void testChmodInternal(String mode, String expectedPerm)
|
||||||
|
throws IOException {
|
||||||
|
File a = new File(TEST_DIR, "file1");
|
||||||
|
assertTrue(a.createNewFile());
|
||||||
|
|
||||||
|
// Reset permissions on the file to default
|
||||||
|
chmod("700", a);
|
||||||
|
|
||||||
|
// Apply the mode mask
|
||||||
|
chmod(mode, a);
|
||||||
|
|
||||||
|
// Compare the output
|
||||||
|
assertPermissions(a, expectedPerm);
|
||||||
|
|
||||||
|
a.delete();
|
||||||
|
assertFalse(a.exists());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void testNewFileChmodInternal(String expectedPerm) throws IOException {
|
||||||
|
// Create a new directory
|
||||||
|
File dir = new File(TEST_DIR, "dir1");
|
||||||
|
|
||||||
|
assertTrue(dir.mkdir());
|
||||||
|
|
||||||
|
// Set permission use chmod
|
||||||
|
chmod("755", dir);
|
||||||
|
|
||||||
|
// Create a child file in the directory
|
||||||
|
File child = new File(dir, "file1");
|
||||||
|
assertTrue(child.createNewFile());
|
||||||
|
|
||||||
|
// Verify the child file has correct permissions
|
||||||
|
assertPermissions(child, expectedPerm);
|
||||||
|
|
||||||
|
child.delete();
|
||||||
|
dir.delete();
|
||||||
|
assertFalse(dir.exists());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void testChmodInternalR(String mode, String expectedPerm,
|
||||||
|
String expectedPermx) throws IOException {
|
||||||
|
// Setup test folder hierarchy
|
||||||
|
File a = new File(TEST_DIR, "a");
|
||||||
|
assertTrue(a.mkdir());
|
||||||
|
chmod("700", a);
|
||||||
|
File aa = new File(a, "a");
|
||||||
|
assertTrue(aa.createNewFile());
|
||||||
|
chmod("600", aa);
|
||||||
|
File ab = new File(a, "b");
|
||||||
|
assertTrue(ab.mkdir());
|
||||||
|
chmod("700", ab);
|
||||||
|
File aba = new File(ab, "a");
|
||||||
|
assertTrue(aba.mkdir());
|
||||||
|
chmod("700", aba);
|
||||||
|
File abb = new File(ab, "b");
|
||||||
|
assertTrue(abb.createNewFile());
|
||||||
|
chmod("600", abb);
|
||||||
|
File abx = new File(ab, "x");
|
||||||
|
assertTrue(abx.createNewFile());
|
||||||
|
chmod("u+x", abx);
|
||||||
|
|
||||||
|
// Run chmod recursive
|
||||||
|
chmodR(mode, a);
|
||||||
|
|
||||||
|
// Verify outcome
|
||||||
|
assertPermissions(a, "d" + expectedPermx);
|
||||||
|
assertPermissions(aa, "-" + expectedPerm);
|
||||||
|
assertPermissions(ab, "d" + expectedPermx);
|
||||||
|
assertPermissions(aba, "d" + expectedPermx);
|
||||||
|
assertPermissions(abb, "-" + expectedPerm);
|
||||||
|
assertPermissions(abx, "-" + expectedPermx);
|
||||||
|
|
||||||
|
assertTrue(FileUtil.fullyDelete(a));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testBasicChmod() throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Not supported on non-Windows platforms
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// - Create a file.
|
||||||
|
// - Change mode to 377 so owner does not have read permission.
|
||||||
|
// - Verify the owner truly does not have the permissions to read.
|
||||||
|
File a = new File(TEST_DIR, "a");
|
||||||
|
a.createNewFile();
|
||||||
|
chmod("377", a);
|
||||||
|
|
||||||
|
try {
|
||||||
|
readFile(a);
|
||||||
|
assertFalse("readFile should have failed!", true);
|
||||||
|
} catch (IOException ex) {
|
||||||
|
LOG.info("Expected: Failed read from a file with permissions 377");
|
||||||
|
}
|
||||||
|
// restore permissions
|
||||||
|
chmod("700", a);
|
||||||
|
|
||||||
|
// - Create a file.
|
||||||
|
// - Change mode to 577 so owner does not have write permission.
|
||||||
|
// - Verify the owner truly does not have the permissions to write.
|
||||||
|
chmod("577", a);
|
||||||
|
|
||||||
|
try {
|
||||||
|
writeFile(a, "test");
|
||||||
|
assertFalse("writeFile should have failed!", true);
|
||||||
|
} catch (IOException ex) {
|
||||||
|
LOG.info("Expected: Failed write to a file with permissions 577");
|
||||||
|
}
|
||||||
|
// restore permissions
|
||||||
|
chmod("700", a);
|
||||||
|
assertTrue(a.delete());
|
||||||
|
|
||||||
|
// - Copy WINUTILS to a new executable file, a.exe.
|
||||||
|
// - Change mode to 677 so owner does not have execute permission.
|
||||||
|
// - Verify the owner truly does not have the permissions to execute the file.
|
||||||
|
|
||||||
|
File winutilsFile = new File(Shell.WINUTILS);
|
||||||
|
File aExe = new File(TEST_DIR, "a.exe");
|
||||||
|
FileUtils.copyFile(winutilsFile, aExe);
|
||||||
|
chmod("677", aExe);
|
||||||
|
|
||||||
|
try {
|
||||||
|
Shell.execCommand(aExe.getCanonicalPath(), "ls");
|
||||||
|
assertFalse("executing " + aExe + " should have failed!", true);
|
||||||
|
} catch (IOException ex) {
|
||||||
|
LOG.info("Expected: Failed to execute a file with permissions 677");
|
||||||
|
}
|
||||||
|
assertTrue(aExe.delete());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testChmod() throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Not supported on non-Windows platforms
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
testChmodInternal("7", "-------rwx");
|
||||||
|
testChmodInternal("70", "----rwx---");
|
||||||
|
testChmodInternal("u-x,g+r,o=g", "-rw-r--r--");
|
||||||
|
testChmodInternal("u-x,g+rw", "-rw-rw----");
|
||||||
|
testChmodInternal("u-x,g+rwx-x,o=u", "-rw-rw-rw-");
|
||||||
|
testChmodInternal("+", "-rwx------");
|
||||||
|
|
||||||
|
// Recursive chmod tests
|
||||||
|
testChmodInternalR("755", "rwxr-xr-x", "rwxr-xr-x");
|
||||||
|
testChmodInternalR("u-x,g+r,o=g", "rw-r--r--", "rw-r--r--");
|
||||||
|
testChmodInternalR("u-x,g+rw", "rw-rw----", "rw-rw----");
|
||||||
|
testChmodInternalR("u-x,g+rwx-x,o=u", "rw-rw-rw-", "rw-rw-rw-");
|
||||||
|
testChmodInternalR("a+rX", "rw-r--r--", "rwxr-xr-x");
|
||||||
|
|
||||||
|
// Test a new file created in a chmod'ed directory has expected permission
|
||||||
|
testNewFileChmodInternal("-rwx------");
|
||||||
|
}
|
||||||
|
|
||||||
|
private void chown(String userGroup, File file) throws IOException {
|
||||||
|
Shell.execCommand(
|
||||||
|
Shell.WINUTILS, "chown", userGroup, file.getCanonicalPath());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void assertOwners(File file, String expectedUser,
|
||||||
|
String expectedGroup) throws IOException {
|
||||||
|
String [] args = lsF(file).trim().split("[\\|]");
|
||||||
|
assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
|
||||||
|
assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test (timeout = 30000)
|
||||||
|
public void testChown() throws IOException {
|
||||||
|
if (!Shell.WINDOWS) {
|
||||||
|
// Not supported on non-Windows platforms
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
File a = new File(TEST_DIR, "a");
|
||||||
|
assertTrue(a.createNewFile());
|
||||||
|
String username = System.getProperty("user.name");
|
||||||
|
// username including the domain aka DOMAIN\\user
|
||||||
|
String qualifiedUsername = Shell.execCommand("whoami").trim();
|
||||||
|
String admins = "Administrators";
|
||||||
|
String qualifiedAdmins = "BUILTIN\\Administrators";
|
||||||
|
|
||||||
|
chown(username + ":" + admins, a);
|
||||||
|
assertOwners(a, qualifiedUsername, qualifiedAdmins);
|
||||||
|
|
||||||
|
chown(username, a);
|
||||||
|
chown(":" + admins, a);
|
||||||
|
assertOwners(a, qualifiedUsername, qualifiedAdmins);
|
||||||
|
|
||||||
|
chown(":" + admins, a);
|
||||||
|
chown(username + ":", a);
|
||||||
|
assertOwners(a, qualifiedUsername, qualifiedAdmins);
|
||||||
|
|
||||||
|
assertTrue(a.delete());
|
||||||
|
assertFalse(a.exists());
|
||||||
|
}
|
||||||
|
}
|
|
@ -107,7 +107,7 @@
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
ROOT=`cd ${basedir}/..;pwd`
|
ROOT=`cd ../..;pwd`
|
||||||
echo
|
echo
|
||||||
echo "Current directory `pwd`"
|
echo "Current directory `pwd`"
|
||||||
echo
|
echo
|
||||||
|
@ -151,7 +151,8 @@
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
run tar czf hadoop-${project.version}.tar.gz hadoop-${project.version}
|
run tar cf hadoop-${project.version}.tar hadoop-${project.version}
|
||||||
|
run gzip hadoop-${project.version}.tar
|
||||||
echo
|
echo
|
||||||
echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
|
echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
|
||||||
echo
|
echo
|
||||||
|
|
|
@ -539,15 +539,8 @@
|
||||||
|
|
||||||
<!-- Using Unix script to preserve file permissions -->
|
<!-- Using Unix script to preserve file permissions -->
|
||||||
<echo file="${project.build.directory}/tomcat-untar.sh">
|
<echo file="${project.build.directory}/tomcat-untar.sh">
|
||||||
|
cd "${project.build.directory}/tomcat.exp"
|
||||||
which cygpath 2> /dev/null
|
gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf -
|
||||||
if [ $? = 1 ]; then
|
|
||||||
BUILD_DIR="${project.build.directory}"
|
|
||||||
else
|
|
||||||
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
|
|
||||||
fi
|
|
||||||
cd $BUILD_DIR/tomcat.exp
|
|
||||||
tar xzf ${basedir}/downloads/apache-tomcat-${tomcat.version}.tar.gz
|
|
||||||
</echo>
|
</echo>
|
||||||
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
|
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
|
||||||
<arg line="./tomcat-untar.sh"/>
|
<arg line="./tomcat-untar.sh"/>
|
||||||
|
@ -582,15 +575,8 @@
|
||||||
<target if="tar">
|
<target if="tar">
|
||||||
<!-- Using Unix script to preserve symlinks -->
|
<!-- Using Unix script to preserve symlinks -->
|
||||||
<echo file="${project.build.directory}/dist-maketar.sh">
|
<echo file="${project.build.directory}/dist-maketar.sh">
|
||||||
|
cd "${project.build.directory}"
|
||||||
which cygpath 2> /dev/null
|
tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
|
||||||
if [ $? = 1 ]; then
|
|
||||||
BUILD_DIR="${project.build.directory}"
|
|
||||||
else
|
|
||||||
BUILD_DIR=`cygpath --unix '${project.build.directory}'`
|
|
||||||
fi
|
|
||||||
cd $BUILD_DIR
|
|
||||||
tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
|
|
||||||
</echo>
|
</echo>
|
||||||
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
|
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
|
||||||
<arg line="./dist-maketar.sh"/>
|
<arg line="./dist-maketar.sh"/>
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
branch-trunk-win changes - unreleased
|
||||||
|
|
||||||
|
HDFS-4145. Merge hdfs cmd line scripts from branch-1-win. (David Lao,
|
||||||
|
Bikas Saha, Lauren Yang, Chuan Liu, Thejas M Nair and Ivan Mitic via suresh)
|
||||||
|
|
||||||
|
HDFS-4163. HDFS distribution build fails on Windows. (Chris Nauroth via
|
||||||
|
suresh)
|
||||||
|
|
||||||
|
HDFS-4316. branch-trunk-win contains test code accidentally added during
|
||||||
|
work on fixing tests on Windows. (Chris Nauroth via suresh)
|
||||||
|
|
||||||
|
HDFS-4297. Fix issues related to datanode concurrent reading and writing on
|
||||||
|
Windows. (Arpit Agarwal, Chuan Liu via suresh)
|
|
@ -511,6 +511,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
<exclude>CHANGES.txt</exclude>
|
<exclude>CHANGES.txt</exclude>
|
||||||
<exclude>CHANGES.HDFS-1623.txt</exclude>
|
<exclude>CHANGES.HDFS-1623.txt</exclude>
|
||||||
<exclude>CHANGES.HDFS-347.txt</exclude>
|
<exclude>CHANGES.HDFS-347.txt</exclude>
|
||||||
|
<exclude>CHANGES.branch-trunk-win.txt</exclude>
|
||||||
<exclude>.idea/**</exclude>
|
<exclude>.idea/**</exclude>
|
||||||
<exclude>src/main/conf/*</exclude>
|
<exclude>src/main/conf/*</exclude>
|
||||||
<exclude>src/main/docs/**</exclude>
|
<exclude>src/main/docs/**</exclude>
|
||||||
|
|
|
@ -146,9 +146,6 @@ else
|
||||||
CLASS="$COMMAND"
|
CLASS="$COMMAND"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if $cygwin; then
|
|
||||||
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
|
|
||||||
fi
|
|
||||||
export CLASSPATH=$CLASSPATH
|
export CLASSPATH=$CLASSPATH
|
||||||
|
|
||||||
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
|
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
|
||||||
|
|
|
@ -0,0 +1,43 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
|
||||||
|
@rem included in all the hdfs scripts with source command
|
||||||
|
@rem should not be executed directly
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
if exist %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd (
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
|
||||||
|
) else if exist %HADOOP_COMMON_HOME%\libexec\hadoop-config.cmd (
|
||||||
|
call %HADOOP_COMMON_HOME%\libexec\hadoop-config.cmd %*
|
||||||
|
) else if exist %HADOOP_HOME%\libexec\hadoop-config.cmd (
|
||||||
|
call %HADOOP_HOME%\libexec\hadoop-config.cmd %*
|
||||||
|
) else (
|
||||||
|
echo Hadoop common not found.
|
||||||
|
)
|
||||||
|
|
||||||
|
:eof
|
|
@ -0,0 +1,171 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
@rem
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
:main
|
||||||
|
if exist %HADOOP_CONF_DIR%\hadoop-env.cmd (
|
||||||
|
call %HADOOP_CONF_DIR%\hadoop-env.cmd
|
||||||
|
)
|
||||||
|
|
||||||
|
set hdfs-command=%1
|
||||||
|
call :make_command_arguments %*
|
||||||
|
|
||||||
|
if not defined hdfs-command (
|
||||||
|
goto print_usage
|
||||||
|
)
|
||||||
|
|
||||||
|
call :%hdfs-command% %hdfs-command-arguments%
|
||||||
|
set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hdfs-command-arguments%
|
||||||
|
call %JAVA% %java_arguments%
|
||||||
|
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:namenode
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_NAMENODE_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:zkfc
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.DFSZKFailoverController
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ZKFC_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:secondarynamenode
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_SECONDARYNAMENODE_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:datanode
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% -server %HADOOP_DATANODE_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:dfs
|
||||||
|
set CLASS=org.apache.hadoop.fs.FsShell
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:dfsadmin
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:haadmin
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
|
||||||
|
set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:fsck
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.DFSck
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:balancer
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
|
||||||
|
set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_BALANCER_OPTS%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:jmxget
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.JMXGet
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:oiv
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:oev
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:fetchdt
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:getconf
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.GetConf
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:groups
|
||||||
|
set CLASS=org.apache.hadoop.hdfs.tools.GetGroups
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
@rem This changes %1, %2 etc. Hence those cannot be used after calling this.
|
||||||
|
:make_command_arguments
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
if [%2] == [] goto :eof
|
||||||
|
shift
|
||||||
|
set _hdfsarguments=
|
||||||
|
:MakeCmdArgsLoop
|
||||||
|
if [%1]==[] goto :EndLoop
|
||||||
|
|
||||||
|
if not defined _hdfsarguments (
|
||||||
|
set _hdfsarguments=%1
|
||||||
|
) else (
|
||||||
|
set _hdfsarguments=!_hdfsarguments! %1
|
||||||
|
)
|
||||||
|
shift
|
||||||
|
goto :MakeCmdArgsLoop
|
||||||
|
:EndLoop
|
||||||
|
set hdfs-command-arguments=%_hdfsarguments%
|
||||||
|
goto :eof
|
||||||
|
|
||||||
|
:print_usage
|
||||||
|
@echo Usage: hdfs [--config confdir] COMMAND
|
||||||
|
@echo where COMMAND is one of:
|
||||||
|
@echo dfs run a filesystem command on the file systems supported in Hadoop.
|
||||||
|
@echo namenode -format format the DFS filesystem
|
||||||
|
@echo secondarynamenode run the DFS secondary namenode
|
||||||
|
@echo namenode run the DFS namenode
|
||||||
|
@echo zkfc run the ZK Failover Controller daemon
|
||||||
|
@echo datanode run a DFS datanode
|
||||||
|
@echo dfsadmin run a DFS admin client
|
||||||
|
@echo fsck run a DFS filesystem checking utility
|
||||||
|
@echo balancer run a cluster balancing utility
|
||||||
|
@echo jmxget get JMX exported values from NameNode or DataNode.
|
||||||
|
@echo oiv apply the offline fsimage viewer to an fsimage
|
||||||
|
@echo oev apply the offline edits viewer to an edits file
|
||||||
|
@echo fetchdt fetch a delegation token from the NameNode
|
||||||
|
@echo getconf get config values from configuration
|
||||||
|
@echo groups get the groups which users belong to
|
||||||
|
@echo Use -help to see options
|
||||||
|
@echo.
|
||||||
|
@echo Most commands print help when invoked w/o parameters.
|
||||||
|
|
||||||
|
endlocal
|
|
@ -0,0 +1,41 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
@rem
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
start "Apache Hadoop Distribution" hadoop namenode
|
||||||
|
start "Apache Hadoop Distribution" hadoop datanode
|
||||||
|
|
||||||
|
endlocal
|
|
@ -0,0 +1,41 @@
|
||||||
|
@echo off
|
||||||
|
@rem Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
@rem contributor license agreements. See the NOTICE file distributed with
|
||||||
|
@rem this work for additional information regarding copyright ownership.
|
||||||
|
@rem The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
@rem (the "License"); you may not use this file except in compliance with
|
||||||
|
@rem the License. You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
@rem
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
if not defined HADOOP_BIN_PATH (
|
||||||
|
set HADOOP_BIN_PATH=%~dp0
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%HADOOP_BIN_PATH:~-1%" == "\" (
|
||||||
|
set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
|
||||||
|
)
|
||||||
|
|
||||||
|
set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
|
||||||
|
if not defined HADOOP_LIBEXEC_DIR (
|
||||||
|
set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
|
||||||
|
)
|
||||||
|
|
||||||
|
call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
|
||||||
|
if "%1" == "--config" (
|
||||||
|
shift
|
||||||
|
shift
|
||||||
|
)
|
||||||
|
|
||||||
|
Taskkill /FI "WINDOWTITLE eq Apache Hadoop Distribution - hadoop namenode"
|
||||||
|
Taskkill /FI "WINDOWTITLE eq Apache Hadoop Distribution - hadoop datanode"
|
||||||
|
|
||||||
|
endlocal
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue